diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..cc482b310340fff63d0a0c39cbf7b908c6a76870 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,63 @@ +notebooks/ +data/ +.uploads/ +.venv/ +.env +sqlite-db/ +temp/ +google-credentials.json +docker-compose* +.docker_data/ +docs/ +surreal_data/ +surreal-data/ +notebook_data/ +temp/ +*.env +.git/ +.github/ + +# Frontend build artifacts and dependencies +frontend/node_modules/ +frontend/.next/ +frontend/.env.local + +# Cache directories (recursive patterns) +**/__pycache__/ +**/.mypy_cache/ +**/.ruff_cache/ +**/.pytest_cache/ +**/*.pyc +**/*.pyo +**/*.pyd +.coverage +.coverage.* +htmlcov/ +.tox/ +.nox/ +.cache/ +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ + +# IDE and editor files +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# OS files +.DS_Store +.DS_Store? +._* +.Spotlight-V100 +.Trashes +ehthumbs.db +Thumbs.db + + +.quarentena/ +surreal_single_data/ \ No newline at end of file diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..6f1d1da058b85de29ca1e13e618bc1f6b5af4764 --- /dev/null +++ b/.env.example @@ -0,0 +1,259 @@ + +# API CONFIGURATION +# URL where the API can be accessed by the browser +# This setting allows the frontend to connect to the API at runtime (no rebuild needed!) +# +# IMPORTANT: Do NOT include /api at the end - it will be added automatically! 
+# +# Common scenarios: +# - Docker on localhost: http://localhost:5055 (default, works for most cases) +# - Docker on LAN/remote server: http://192.168.1.100:5055 or http://your-server-ip:5055 +# - Behind reverse proxy with custom domain: https://your-domain.com +# - Behind reverse proxy with subdomain: https://api.your-domain.com +# +# Examples for reverse proxy users: +# - API_URL=https://notebook.example.com (frontend will call https://notebook.example.com/api/*) +# - API_URL=https://api.example.com (frontend will call https://api.example.com/api/*) +# +# Note: If not set, the system will auto-detect based on the incoming request. +# Only set this if you need to override the auto-detection (e.g., reverse proxy scenarios). +API_URL=http://localhost:5055 + +# INTERNAL API URL (Server-Side) +# URL where Next.js server-side should proxy API requests (via rewrites) +# This is DIFFERENT from API_URL which is used by the browser client +# +# INTERNAL_API_URL is used by Next.js rewrites to forward /api/* requests to the FastAPI backend +# API_URL is used by the browser to know where to make API calls +# +# Default: http://localhost:5055 (single-container deployment - both services on same host) +# Override for multi-container: INTERNAL_API_URL=http://api-service:5055 +# +# Common scenarios: +# - Single container (default): Don't set - defaults to http://localhost:5055 +# - Multi-container Docker Compose: INTERNAL_API_URL=http://api:5055 (use service name) +# - Kubernetes/advanced networking: INTERNAL_API_URL=http://api-service.namespace.svc.cluster.local:5055 +# +# Why two variables? 
+# - API_URL: External/public URL that browsers use (can be https://your-domain.com) +# - INTERNAL_API_URL: Internal container networking URL (usually http://localhost:5055 or service name) +# +# INTERNAL_API_URL=http://localhost:5055 + +# API CLIENT TIMEOUT (in seconds) +# Controls how long the frontend/Streamlit UI waits for API responses +# Increase this if you're using slow AI providers or hardware (Ollama on CPU, remote LM Studio, etc.) +# Default: 300 seconds (5 minutes) - sufficient for most transformation/insight operations +# +# Common scenarios: +# - Fast cloud APIs (OpenAI, Anthropic): 300 seconds is more than enough +# - Local Ollama on GPU: 300 seconds should work fine +# - Local Ollama on CPU: Consider 600 seconds (10 minutes) or more +# - Remote LM Studio over slow network: Consider 900 seconds (15 minutes) +# - Very large documents: May need 900+ seconds +# +# API_CLIENT_TIMEOUT=300 + +# ESPERANTO LLM TIMEOUT (in seconds) +# Controls the timeout for AI model API calls at the Esperanto library level +# This is separate from API_CLIENT_TIMEOUT and applies to the actual LLM provider requests +# Only increase this if you're experiencing timeouts during model inference itself +# Default: 60 seconds (built into Esperanto) +# +# Important: This should generally be LOWER than API_CLIENT_TIMEOUT to allow proper error handling +# +# Common scenarios: +# - Fast cloud APIs (OpenAI, Anthropic, Groq): 60 seconds is sufficient +# - Local Ollama with small models: 120-180 seconds may help +# - Local Ollama with large models on CPU: 300+ seconds +# - Remote or self-hosted LLMs: 180-300 seconds depending on hardware +# +# Note: If transformations complete but you see timeout errors, increase API_CLIENT_TIMEOUT first. +# Only increase ESPERANTO_LLM_TIMEOUT if the model itself is timing out during inference. +# +# ESPERANTO_LLM_TIMEOUT=60 + +# SSL VERIFICATION CONFIGURATION +# Configure SSL certificate verification for local AI providers (Ollama, LM Studio, etc.) 
+# behind reverse proxies with self-signed certificates +# +# Option 1: Custom CA Bundle (recommended for self-signed certs) +# Point to your CA certificate file to verify SSL while using custom certificates +# ESPERANTO_SSL_CA_BUNDLE=/path/to/your/ca-bundle.pem +# +# Option 2: Disable SSL Verification (development only) +# WARNING: Disabling SSL verification exposes you to man-in-the-middle attacks +# Only use in trusted development/testing environments +# ESPERANTO_SSL_VERIFY=false + +# SECURITY +# Set this to protect your Open Notebook instance with a password (for public hosting) +# OPEN_NOTEBOOK_PASSWORD= + +# OPENAI +# OPENAI_API_KEY= + + +# ANTHROPIC +# ANTHROPIC_API_KEY= + +# GEMINI +# this is the best model for long context and podcast generation +# GOOGLE_API_KEY= +# GEMINI_API_BASE_URL= # Optional: Override default endpoint (for Vertex AI, proxies, etc.) + +# VERTEXAI +# VERTEX_PROJECT=my-google-cloud-project-name +# GOOGLE_APPLICATION_CREDENTIALS=./google-credentials.json +# VERTEX_LOCATION=us-east5 + +# MISTRAL +# MISTRAL_API_KEY= + +# DEEPSEEK +# DEEPSEEK_API_KEY= + +# OLLAMA +# OLLAMA_API_BASE="http://10.20.30.20:11434" + +# OPEN ROUTER +# OPENROUTER_BASE_URL="https://openrouter.ai/api/v1" +# OPENROUTER_API_KEY= + +# GROQ +# GROQ_API_KEY= + +# XAI +# XAI_API_KEY= + +# ELEVENLABS +# Used only by the podcast feature +# ELEVENLABS_API_KEY= + +# TTS BATCH SIZE +# Controls concurrent TTS requests for podcast generation (default: 5) +# Lower values reduce provider load but increase generation time +# Recommended: OpenAI=5, ElevenLabs=2, Google=4, Custom=1 +# TTS_BATCH_SIZE=2 + +# VOYAGE AI +# VOYAGE_API_KEY= + +# OPENAI COMPATIBLE ENDPOINTS +# Generic configuration (applies to all modalities: language, embedding, STT, TTS) +# OPENAI_COMPATIBLE_BASE_URL=http://localhost:1234/v1 +# OPENAI_COMPATIBLE_API_KEY= + +# Mode-specific configuration (overrides generic if set) +# Use these when you want different endpoints for different capabilities +# 
OPENAI_COMPATIBLE_BASE_URL_LLM=http://localhost:1234/v1 +# OPENAI_COMPATIBLE_API_KEY_LLM= +# OPENAI_COMPATIBLE_BASE_URL_EMBEDDING=http://localhost:8080/v1 +# OPENAI_COMPATIBLE_API_KEY_EMBEDDING= +# OPENAI_COMPATIBLE_BASE_URL_STT=http://localhost:9000/v1 +# OPENAI_COMPATIBLE_API_KEY_STT= +# OPENAI_COMPATIBLE_BASE_URL_TTS=http://localhost:9000/v1 +# OPENAI_COMPATIBLE_API_KEY_TTS= + +# AZURE OPENAI +# Generic configuration (applies to all modalities: language, embedding, STT, TTS) +# AZURE_OPENAI_API_KEY= +# AZURE_OPENAI_ENDPOINT= +# AZURE_OPENAI_API_VERSION=2024-12-01-preview + +# Mode-specific configuration (overrides generic if set) +# Use these when you want different deployments for different AI capabilities +# AZURE_OPENAI_API_KEY_LLM= +# AZURE_OPENAI_ENDPOINT_LLM= +# AZURE_OPENAI_API_VERSION_LLM= + +# AZURE_OPENAI_API_KEY_EMBEDDING= +# AZURE_OPENAI_ENDPOINT_EMBEDDING= +# AZURE_OPENAI_API_VERSION_EMBEDDING= + +# AZURE_OPENAI_API_KEY_STT= +# AZURE_OPENAI_ENDPOINT_STT= +# AZURE_OPENAI_API_VERSION_STT= + +# AZURE_OPENAI_API_KEY_TTS= +# AZURE_OPENAI_ENDPOINT_TTS= +# AZURE_OPENAI_API_VERSION_TTS= + +# USE THIS IF YOU WANT TO DEBUG THE APP ON LANGSMITH +# LANGCHAIN_TRACING_V2=true +# LANGCHAIN_ENDPOINT="https://api.smith.langchain.com" +# LANGCHAIN_API_KEY= +# LANGCHAIN_PROJECT="Open Notebook" + +# CONNECTION DETAILS FOR YOUR SURREAL DB +# New format (preferred) - WebSocket URL +SURREAL_URL="ws://surrealdb:8000/rpc" +SURREAL_USER="root" +SURREAL_PASSWORD="root" +SURREAL_NAMESPACE="open_notebook" +SURREAL_DATABASE="staging" + +# RETRY CONFIGURATION (surreal-commands v1.2.0+) +# Global defaults for all background commands unless explicitly overridden at command level +# These settings help commands automatically recover from transient failures like: +# - Database transaction conflicts during concurrent operations +# - Network timeouts when calling external APIs +# - Rate limits from LLM/embedding providers +# - Temporary resource unavailability + +# Enable/disable retry 
globally (default: true) +# Set to false to disable retries for all commands (useful for debugging) +SURREAL_COMMANDS_RETRY_ENABLED=true + +# Maximum retry attempts before giving up (default: 3) +# Database operations use 5 attempts (defined per-command) +# API calls use 3 attempts (defined per-command) +SURREAL_COMMANDS_RETRY_MAX_ATTEMPTS=3 + +# Wait strategy between retry attempts (default: exponential_jitter) +# Options: exponential_jitter, exponential, fixed, random +# - exponential_jitter: Recommended - prevents thundering herd during DB conflicts +# - exponential: Good for API rate limits (predictable backoff) +# - fixed: Use for quick recovery scenarios +# - random: Use when you want unpredictable retry timing +SURREAL_COMMANDS_RETRY_WAIT_STRATEGY=exponential_jitter + +# Minimum wait time between retries in seconds (default: 1) +# Database conflicts: 1 second (fast retry for transient issues) +# API rate limits: 5 seconds (wait for quota reset) +SURREAL_COMMANDS_RETRY_WAIT_MIN=1 + +# Maximum wait time between retries in seconds (default: 30) +# Database conflicts: 30 seconds maximum +# API rate limits: 120 seconds maximum (defined per-command) +# Total retry time won't exceed max_attempts * wait_max +SURREAL_COMMANDS_RETRY_WAIT_MAX=30 + +# WORKER CONCURRENCY +# Maximum number of concurrent tasks in the worker pool (default: 5) +# This affects the likelihood of database transaction conflicts during batch operations +# +# Tuning guidelines based on deployment size: +# - Resource-constrained (low CPU/memory): 1-2 workers +# Reduces conflicts and resource usage, but slower processing +# +# - Normal deployment (balanced): 5 workers (RECOMMENDED) +# Good balance between throughput and conflict rate +# Retry logic handles occasional conflicts gracefully +# +# - Large instances (high CPU/memory): 10-20 workers +# Higher throughput but more frequent DB conflicts +# Relies heavily on retry logic with jittered backoff +# +# Note: Higher concurrency increases 
vectorization speed but also increases +# SurrealDB transaction conflicts. The retry logic with exponential-jitter +# backoff ensures operations complete successfully even at high concurrency. +SURREAL_COMMANDS_MAX_TASKS=5 + +# OPEN_NOTEBOOK_PASSWORD= + +# FIRECRAWL - Get a key at https://firecrawl.dev/ +FIRECRAWL_API_KEY= + +# JINA - Get a key at https://jina.ai/ +JINA_API_KEY= \ No newline at end of file diff --git a/.env.railway b/.env.railway new file mode 100644 index 0000000000000000000000000000000000000000..b8b3d9df2f20c9540ac56267636f3774369289a4 --- /dev/null +++ b/.env.railway @@ -0,0 +1,125 @@ +# Railway Deployment Environment Variables +# Copy these to your Railway service's Variables section + +# ============================================ +# DATABASE CONNECTION (Single Container) +# ============================================ +# Use 127.0.0.1 for Railway single-container deployment +SURREAL_URL=ws://127.0.0.1:8000/rpc +SURREAL_USER=root +SURREAL_PASSWORD=root +SURREAL_NAMESPACE=test +SURREAL_DATABASE=test + +# ============================================ +# API CONFIGURATION +# ============================================ +# INTERNAL_API_URL: Used by Next.js server-side to proxy to FastAPI +INTERNAL_API_URL=http://127.0.0.1:5055 + +# API_URL: Public URL - SET THIS AFTER FIRST DEPLOY +# Replace YOUR_RAILWAY_APP_URL with your actual Railway app URL +# Format: https://your-app-name.up.railway.app (no /api at the end) +API_URL=https://YOUR_RAILWAY_APP_URL + +# ============================================ +# WORKER & RETRY CONFIGURATION +# ============================================ +# Background worker concurrency (default: 5) +SURREAL_COMMANDS_MAX_TASKS=5 + +# Retry configuration for resilient background tasks +SURREAL_COMMANDS_RETRY_ENABLED=true +SURREAL_COMMANDS_RETRY_MAX_ATTEMPTS=3 +SURREAL_COMMANDS_RETRY_WAIT_STRATEGY=exponential_jitter +SURREAL_COMMANDS_RETRY_WAIT_MIN=1 +SURREAL_COMMANDS_RETRY_WAIT_MAX=30 + +# 
============================================ +# AI MODEL API KEYS (Configured for FREE tier) +# ============================================ + +# Groq (for chat, transformations, insights - FREE) +GROQ_API_KEY=your_groq_api_key_here + +# Google Gemini (for embeddings, long context - FREE) +GOOGLE_API_KEY=your_google_api_key_here + +# Llama (if using via Ollama or another provider) +# If using Ollama locally/remote, set the base URL: +# OLLAMA_API_BASE=http://your-ollama-host:11434 + +# OpenAI (optional - for GPT models, embeddings, TTS) +# OPENAI_API_KEY=sk-your_openai_key_here + +# Anthropic (optional - for Claude models) +# ANTHROPIC_API_KEY=sk-ant-your_anthropic_key_here + +# Mistral (optional - for Mistral models) +# MISTRAL_API_KEY=your_mistral_key_here + +# DeepSeek (optional - for DeepSeek models) +# DEEPSEEK_API_KEY=your_deepseek_key_here + +# XAI (optional - for Grok models) +# XAI_API_KEY=your_xai_key_here + +# OpenRouter (optional - access multiple models via one API) +# OPENROUTER_API_KEY=your_openrouter_key_here +# OPENROUTER_BASE_URL=https://openrouter.ai/api/v1 + +# ============================================ +# PODCAST FEATURES (Optional) +# ============================================ +# ElevenLabs for high-quality text-to-speech +# ELEVENLABS_API_KEY=your_elevenlabs_key_here + +# TTS batch size (adjust based on provider) +# OpenAI/Google: 5, ElevenLabs: 2, Custom: 1 +# TTS_BATCH_SIZE=5 + +# ============================================ +# EMBEDDINGS (Optional - if not using default) +# ============================================ +# Voyage AI for advanced embeddings +# VOYAGE_API_KEY=your_voyage_key_here + +# ============================================ +# WEB SCRAPING (Optional) +# ============================================ +# Firecrawl for enhanced web scraping +# FIRECRAWL_API_KEY=your_firecrawl_key_here + +# Jina AI for web reading and embeddings +# JINA_API_KEY=your_jina_key_here + +# ============================================ +# SECURITY 
(Optional but Recommended) +# ============================================ +# Protect your instance with a password for public hosting +# OPEN_NOTEBOOK_PASSWORD=your_secure_password_here + +# ============================================ +# ADVANCED: TIMEOUT CONFIGURATION (Optional) +# ============================================ +# Only adjust these if you experience timeout issues + +# API client timeout (seconds) - how long frontend waits for responses +# Default: 300 (5 minutes) +# Increase for slow models or large documents +# API_CLIENT_TIMEOUT=300 + +# LLM provider timeout (seconds) - how long to wait for AI model response +# Default: 60 seconds +# Increase for slow local models (Ollama on CPU, etc.) +# ESPERANTO_LLM_TIMEOUT=60 + +# ============================================ +# NOTES FOR RAILWAY DEPLOYMENT +# ============================================ +# 1. PORT variable is automatically set by Railway - DO NOT override it +# 2. Railway will expose your app on the PORT it assigns (usually 8080) +# 3. Set API_URL AFTER your first deploy when you get your Railway domain +# 4. Use 127.0.0.1 (not localhost) for internal connections +# 5. Keep database and API settings as-is for single container deployment + diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 0000000000000000000000000000000000000000..1da9bd5a522674c81e1fff9dbd6167b610b918bc --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,104 @@ +name: πŸ› Bug Report +description: Report a bug or unexpected behavior (app is running but misbehaving) +title: "[Bug]: " +labels: ["bug", "needs-triage"] +body: + - type: markdown + attributes: + value: | + Thanks for reporting a bug! Please fill out the information below to help us understand and fix the issue. + + **Note**: If you're having installation or setup issues, please use the "Installation Issue" template instead. 
+ + - type: textarea + id: what-happened + attributes: + label: What did you do when it broke? + description: Describe the steps you took that led to the bug + placeholder: | + 1. I went to the Notebooks page + 2. I clicked on "Create New Notebook" + 3. I filled in the form and clicked "Save" + 4. Then the error occurred... + validations: + required: true + + - type: textarea + id: how-broke + attributes: + label: How did it break? + description: What happened that was unexpected? What did you expect to happen instead? + placeholder: | + Expected: The notebook should be created and I should see it in the list + Actual: I got an error message saying "Failed to create notebook" + validations: + required: true + + - type: textarea + id: logs-screenshots + attributes: + label: Logs or Screenshots + description: | + Please provide any error messages, logs, or screenshots that might help us understand the issue. + + **How to get logs:** + - Docker: `docker compose logs -f open_notebook` + - Check browser console (F12 β†’ Console tab) + placeholder: | + Paste logs here or drag and drop screenshots. + + Error messages, stack traces, or browser console errors are very helpful! + validations: + required: false + + - type: dropdown + id: version + attributes: + label: Open Notebook Version + description: Which version are you using? + options: + - v1-latest (Docker) + - v1-latest-single (Docker) + - Latest from main branch + - Other (please specify in additional context) + validations: + required: true + + - type: textarea + id: environment + attributes: + label: Environment + description: What environment are you running in? + placeholder: | + - OS: Ubuntu 22.04 / Windows 11 / macOS 14 + - Browser: Chrome 120 + validations: + required: false + + - type: textarea + id: additional-context + attributes: + label: Additional Context + description: Any other information that might be helpful + placeholder: "This started happening after I upgraded to v1.5.0..." 
+ validations: + required: false + + - type: checkboxes + id: willing-to-contribute + attributes: + label: Contribution + description: Would you like to work on fixing this bug? + options: + - label: I am a developer and would like to work on fixing this issue (pending maintainer approval) + required: false + + - type: markdown + attributes: + value: | + --- + **Next Steps:** + 1. A maintainer will review your bug report + 2. If you checked the box above and want to fix it, please propose your solution approach + 3. Wait for assignment before starting development + 4. See our [Contributing Guide](https://github.com/lfnovo/open-notebook/blob/main/CONTRIBUTING.md) for more details diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000000000000000000000000000000000000..630377fef698a8ddb1235ac8d0e8d79261fb47e9 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,11 @@ +blank_issues_enabled: false +contact_links: + - name: πŸ’¬ Discord Community + url: https://discord.gg/37XJPXfz2w + about: Get help from the community and share ideas + - name: πŸ€– Installation Assistant (ChatGPT) + url: https://chatgpt.com/g/g-68776e2765b48191bd1bae3f30212631-open-notebook-installation-assistant + about: CustomGPT that knows all our docs. Really useful. Try it. 
+ - name: πŸ“š Documentation + url: https://github.com/lfnovo/open-notebook/tree/main/docs + about: Browse our comprehensive documentation diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 0000000000000000000000000000000000000000..912f1f12205d190594de2f0353aed53ef2d15375 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,65 @@ +name: ✨ Feature Suggestion +description: Suggest a new feature or improvement for Open Notebook +title: "[Feature]: " +labels: ["enhancement", "needs-triage"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to suggest a feature! Your ideas help make Open Notebook better for everyone. + + - type: textarea + id: feature-description + attributes: + label: Feature Description + description: What feature would you like to see added or improved? + placeholder: "I would like to be able to..." + validations: + required: true + + - type: textarea + id: why-helpful + attributes: + label: Why would this be helpful? + description: Explain how this feature would benefit you and other users + placeholder: "This would help because..." + validations: + required: true + + - type: textarea + id: proposed-solution + attributes: + label: Proposed Solution (Optional) + description: If you have ideas on how to implement this feature, please share them + placeholder: "This could be implemented by..." + validations: + required: false + + - type: textarea + id: additional-context + attributes: + label: Additional Context + description: Any other context, screenshots, or examples that might be helpful + placeholder: "For example, other tools do this by..." + validations: + required: false + + - type: checkboxes + id: willing-to-contribute + attributes: + label: Contribution + description: Would you like to work on implementing this feature? 
+ options: + - label: I am a developer and would like to work on implementing this feature (pending maintainer approval) + required: false + + - type: markdown + attributes: + value: | + --- + **Next Steps:** + 1. A maintainer will review your feature request + 2. If approved and you checked the box above, the issue will be assigned to you + 3. Please wait for assignment before starting development + 4. See our [Contributing Guide](https://github.com/lfnovo/open-notebook/blob/main/CONTRIBUTING.md) for more details + diff --git a/.github/ISSUE_TEMPLATE/installation_issue.yml b/.github/ISSUE_TEMPLATE/installation_issue.yml new file mode 100644 index 0000000000000000000000000000000000000000..155e2abd8885c804a3302331d3de688491f623c6 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/installation_issue.yml @@ -0,0 +1,148 @@ +name: πŸ”§ Installation Issue +description: Report problems with installation, setup, or connectivity +title: "[Install]: " +labels: ["installation", "needs-triage"] +body: + - type: markdown + attributes: + value: | + ## ⚠️ Before You Continue + + **Please try these resources first:** + + 1. πŸ€– **[Installation Assistant ChatGPT](https://chatgpt.com/g/g-68776e2765b48191bd1bae3f30212631-open-notebook-installation-assistant)** - Our AI assistant can help you troubleshoot most installation issues instantly! + + 2. πŸ“š **[Installation Guide](https://github.com/lfnovo/open-notebook/blob/main/docs/getting-started/installation.md)** - Comprehensive setup instructions + + 3. πŸ‹ **[Docker Deployment Guide](https://github.com/lfnovo/open-notebook/blob/main/docs/deployment/docker.md)** - Detailed Docker setup + + 4. πŸ¦™ **Ollama Issues?** Read our [Ollama Guide](https://github.com/lfnovo/open-notebook/blob/main/docs/features/ollama.md) first + + 5. 
πŸ’¬ **[Discord Community](https://discord.gg/37XJPXfz2w)** - Get real-time help from the community + + --- + + If you've tried the above and still need help, please fill out the form below with as much detail as possible. + + - type: dropdown + id: installation-method + attributes: + label: Installation Method + description: How are you trying to install Open Notebook? + options: + - Docker (single container - v1-latest-single) + - Docker (multi-container - docker-compose) + - Local development (make start-all) + - Other (please specify below) + validations: + required: true + + - type: textarea + id: issue-description + attributes: + label: What is the issue? + description: Describe the installation or setup problem you're experiencing + placeholder: | + Example: "I can't connect to the database" or "The container won't start" or "Getting 404 errors when accessing the UI" + validations: + required: true + + - type: textarea + id: logs + attributes: + label: Logs + description: | + Please provide relevant logs. **This is very important for diagnosing issues!** + + **How to get logs:** + - Docker single container: `docker logs open-notebook` + - Docker Compose: `docker compose logs -f` + - Specific service: `docker compose logs -f open_notebook` + placeholder: | + Paste your logs here. Include the full error message and stack trace if available. + render: shell + validations: + required: false + + - type: textarea + id: docker-compose + attributes: + label: Docker Compose Configuration + description: | + If using Docker Compose, please paste your `docker-compose.yml` file here. + + **⚠️ IMPORTANT: Redact any sensitive information (API keys, passwords, etc.)** + placeholder: | + services: + open_notebook: + image: lfnovo/open_notebook:v1-latest-single + ports: + - "8502:8502" + - "5055:5055" + environment: + - OPENAI_API_KEY=sk-***REDACTED*** + ... 
+ render: yaml + validations: + required: false + + - type: textarea + id: env-file + attributes: + label: Environment File + description: | + If using an `.env` or `docker.env` file, please paste it here. + + **⚠️ IMPORTANT: REDACT ALL API KEYS AND PASSWORDS!** + placeholder: | + SURREAL_URL=ws://surrealdb:8000/rpc + SURREAL_USER=root + SURREAL_PASSWORD=***REDACTED*** + OPENAI_API_KEY=sk-***REDACTED*** + ANTHROPIC_API_KEY=sk-ant-***REDACTED*** + render: shell + validations: + required: false + + - type: textarea + id: system-info + attributes: + label: System Information + description: Tell us about your setup + placeholder: | + - Operating System: Ubuntu 22.04 / Windows 11 / macOS 14 + - Docker version: `docker --version` + - Docker Compose version: `docker compose version` + - Architecture: amd64 / arm64 (Apple Silicon) + - Available disk space: `df -h` + - Available memory: `free -h` (Linux) or Activity Monitor (Mac) + validations: + required: false + + - type: textarea + id: additional-context + attributes: + label: Additional Context + description: Any other information that might be helpful + placeholder: | + - Are you behind a corporate proxy or firewall? + - Are you using a VPN? + - Have you made any custom modifications? + - Did this work before and suddenly break? 
+ validations: + required: false + + - type: checkboxes + id: checklist + attributes: + label: Pre-submission Checklist + description: Please confirm you've tried these steps + options: + - label: I tried the [Installation Assistant ChatGPT](https://chatgpt.com/g/g-68776e2765b48191bd1bae3f30212631-open-notebook-installation-assistant) + required: false + - label: I read the relevant documentation ([Installation Guide](https://github.com/lfnovo/open-notebook/blob/main/docs/getting-started/installation.md) or [Ollama Guide](https://github.com/lfnovo/open-notebook/blob/main/docs/features/ollama.md)) + required: false + - label: I searched existing issues to see if this was already reported + required: true + - label: I redacted all sensitive information (API keys, passwords, etc.) + required: true diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000000000000000000000000000000000000..71e18da6b0de67b96e38a04cce784b2f1d1e7f07 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,107 @@ +## Description + + + +## Related Issue + + + +Fixes # + +## Type of Change + + + +- [ ] Bug fix (non-breaking change that fixes an issue) +- [ ] New feature (non-breaking change that adds functionality) +- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) +- [ ] Documentation update +- [ ] Code refactoring (no functional changes) +- [ ] Performance improvement +- [ ] Test coverage improvement + +## How Has This Been Tested? 
+ + + +- [ ] Tested locally with Docker +- [ ] Tested locally with development setup +- [ ] Added new unit tests +- [ ] Existing tests pass (`uv run pytest`) +- [ ] Manual testing performed (describe below) + +**Test Details:** + + +## Design Alignment + + + +**Which design principles does this PR support?** (See [DESIGN_PRINCIPLES.md](../DESIGN_PRINCIPLES.md)) + +- [ ] Privacy First +- [ ] Simplicity Over Features +- [ ] API-First Architecture +- [ ] Multi-Provider Flexibility +- [ ] Extensibility Through Standards +- [ ] Async-First for Performance + +**Explanation:** + + +## Checklist + + + +### Code Quality +- [ ] My code follows PEP 8 style guidelines (Python) +- [ ] My code follows TypeScript best practices (Frontend) +- [ ] I have added type hints to my code (Python) +- [ ] I have added JSDoc comments where appropriate (TypeScript) +- [ ] I have performed a self-review of my code +- [ ] I have commented my code, particularly in hard-to-understand areas +- [ ] My changes generate no new warnings or errors + +### Testing +- [ ] I have added tests that prove my fix is effective or that my feature works +- [ ] New and existing unit tests pass locally with my changes +- [ ] I ran linting: `make ruff` or `ruff check . 
--fix` +- [ ] I ran type checking: `make lint` or `uv run python -m mypy .` + +### Documentation +- [ ] I have updated the relevant documentation in `/docs` (if applicable) +- [ ] I have added/updated docstrings for new/modified functions +- [ ] I have updated the API documentation (if API changes were made) +- [ ] I have added comments to complex logic + +### Database Changes +- [ ] I have created migration scripts for any database schema changes (in `/migrations`) +- [ ] Migration includes both up and down scripts +- [ ] Migration has been tested locally + +### Breaking Changes +- [ ] This PR includes breaking changes +- [ ] I have documented the migration path for users +- [ ] I have updated MIGRATION.md (if applicable) + +## Screenshots (if applicable) + + + +## Additional Context + + + +## Pre-Submission Verification + +Before submitting, please verify: + +- [ ] I have read [CONTRIBUTING.md](../CONTRIBUTING.md) +- [ ] I have read [DESIGN_PRINCIPLES.md](../DESIGN_PRINCIPLES.md) +- [ ] This PR addresses an approved issue that was assigned to me +- [ ] I have not included unrelated changes in this PR +- [ ] My PR title follows conventional commits format (e.g., "feat: add user authentication") + +--- + +**Thank you for contributing to Open Notebook!** πŸŽ‰ diff --git a/.github/workflows/build-and-release.yml b/.github/workflows/build-and-release.yml new file mode 100644 index 0000000000000000000000000000000000000000..6c91f85a791d4cbfaa1087a625dd6f376adf76a0 --- /dev/null +++ b/.github/workflows/build-and-release.yml @@ -0,0 +1,298 @@ +name: Build and Release + +on: + workflow_dispatch: + inputs: + push_latest: + description: 'Also push v1-latest tags' + required: true + default: false + type: boolean + release: + types: [published] + +permissions: + contents: read + packages: write + +env: + GHCR_IMAGE: ghcr.io/lfnovo/open-notebook + DOCKERHUB_IMAGE: lfnovo/open_notebook + +jobs: + extract-version: + runs-on: ubuntu-latest + outputs: + version: ${{ 
steps.version.outputs.version }} + has_dockerhub_secrets: ${{ steps.check.outputs.has_dockerhub_secrets }} + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Extract version from pyproject.toml + id: version + run: | + VERSION=$(grep -m1 '^version = ' pyproject.toml | cut -d'"' -f2) + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "Extracted version: $VERSION" + + - name: Check for Docker Hub credentials + id: check + env: + SECRET_DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + SECRET_DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + run: | + if [[ -n ""$SECRET_DOCKER_USERNAME"" && -n ""$SECRET_DOCKER_PASSWORD"" ]]; then + echo "has_dockerhub_secrets=true" >> $GITHUB_OUTPUT + echo "Docker Hub credentials available" + else + echo "has_dockerhub_secrets=false" >> $GITHUB_OUTPUT + echo "Docker Hub credentials not available - will only push to GHCR" + fi + + build-regular: + needs: extract-version + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Free up disk space + run: | + sudo rm -rf /usr/share/dotnet + sudo rm -rf /usr/local/lib/android + sudo rm -rf /opt/ghc + sudo rm -rf /opt/hostedtoolcache/CodeQL + sudo docker image prune --all --force + df -h + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Login to Docker Hub + if: needs.extract-version.outputs.has_dockerhub_secrets == 'true' + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Cache Docker layers + uses: actions/cache@v3 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-buildx-regular-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx-regular- + + - name: Prepare Docker tags for regular build + id: tags-regular + env: + 
ENV_GHCR_IMAGE: ${{ env.GHCR_IMAGE }} + GITHUB_EVENT_INPUTS_PUSH_LATEST: ${{ github.event.inputs.push_latest }} + GITHUB_EVENT_NAME: ${{ github.event_name }} + GITHUB_EVENT_RELEASE_PRERELEASE: ${{ github.event.release.prerelease }} + ENV_DOCKERHUB_IMAGE: ${{ env.DOCKERHUB_IMAGE }} + run: | + TAGS=""$ENV_GHCR_IMAGE":${{ needs.extract-version.outputs.version }}" + + # Determine if we should push latest tags + PUSH_LATEST=""$GITHUB_EVENT_INPUTS_PUSH_LATEST"" + if [[ -z "$PUSH_LATEST" ]]; then + PUSH_LATEST="false" + fi + + # Add GHCR latest tag if requested or for non-prerelease releases + if [[ "$PUSH_LATEST" == "true" ]] || [[ ""$GITHUB_EVENT_NAME"" == "release" && ""$GITHUB_EVENT_RELEASE_PRERELEASE"" != "true" ]]; then + TAGS="${TAGS},"$ENV_GHCR_IMAGE":v1-latest" + fi + + # Add Docker Hub tags if credentials available + if [[ "${{ needs.extract-version.outputs.has_dockerhub_secrets }}" == "true" ]]; then + TAGS="${TAGS},"$ENV_DOCKERHUB_IMAGE":${{ needs.extract-version.outputs.version }}" + + if [[ "$PUSH_LATEST" == "true" ]] || [[ ""$GITHUB_EVENT_NAME"" == "release" && ""$GITHUB_EVENT_RELEASE_PRERELEASE"" != "true" ]]; then + TAGS="${TAGS},"$ENV_DOCKERHUB_IMAGE":v1-latest" + fi + fi + + echo "tags=${TAGS}" >> $GITHUB_OUTPUT + echo "Generated tags: ${TAGS}" + + - name: Build and push regular image + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./Dockerfile + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ steps.tags-regular.outputs.tags }} + cache-from: type=local,src=/tmp/.buildx-cache + cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max + + - name: Move cache + run: | + rm -rf /tmp/.buildx-cache + mv /tmp/.buildx-cache-new /tmp/.buildx-cache + + build-single: + needs: extract-version + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Free up disk space + run: | + sudo rm -rf /usr/share/dotnet + sudo rm -rf /usr/local/lib/android + sudo rm -rf /opt/ghc + sudo rm -rf /opt/hostedtoolcache/CodeQL + sudo docker image prune --all --force + df -h + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Login to Docker Hub + if: needs.extract-version.outputs.has_dockerhub_secrets == 'true' + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Cache Docker layers + uses: actions/cache@v3 + with: + path: /tmp/.buildx-cache-single + key: ${{ runner.os }}-buildx-single-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx-single- + + - name: Prepare Docker tags for single build + id: tags-single + env: + ENV_GHCR_IMAGE: ${{ env.GHCR_IMAGE }} + GITHUB_EVENT_INPUTS_PUSH_LATEST: ${{ github.event.inputs.push_latest }} + GITHUB_EVENT_NAME: ${{ github.event_name }} + GITHUB_EVENT_RELEASE_PRERELEASE: ${{ github.event.release.prerelease }} + ENV_DOCKERHUB_IMAGE: ${{ env.DOCKERHUB_IMAGE }} + run: | + TAGS=""$ENV_GHCR_IMAGE":${{ needs.extract-version.outputs.version }}-single" + + # Determine if we should push latest tags + PUSH_LATEST=""$GITHUB_EVENT_INPUTS_PUSH_LATEST"" + if [[ -z "$PUSH_LATEST" ]]; then + PUSH_LATEST="false" + fi + + # Add GHCR latest tag if 
requested or for non-prerelease releases + if [[ "$PUSH_LATEST" == "true" ]] || [[ ""$GITHUB_EVENT_NAME"" == "release" && ""$GITHUB_EVENT_RELEASE_PRERELEASE"" != "true" ]]; then + TAGS="${TAGS},"$ENV_GHCR_IMAGE":v1-latest-single" + fi + + # Add Docker Hub tags if credentials available + if [[ "${{ needs.extract-version.outputs.has_dockerhub_secrets }}" == "true" ]]; then + TAGS="${TAGS},"$ENV_DOCKERHUB_IMAGE":${{ needs.extract-version.outputs.version }}-single" + + if [[ "$PUSH_LATEST" == "true" ]] || [[ ""$GITHUB_EVENT_NAME"" == "release" && ""$GITHUB_EVENT_RELEASE_PRERELEASE"" != "true" ]]; then + TAGS="${TAGS},"$ENV_DOCKERHUB_IMAGE":v1-latest-single" + fi + fi + + echo "tags=${TAGS}" >> $GITHUB_OUTPUT + echo "Generated tags: ${TAGS}" + + - name: Build and push single-container image + uses: docker/build-push-action@v5 + with: + context: . + file: ./Dockerfile.single + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ steps.tags-single.outputs.tags }} + cache-from: type=local,src=/tmp/.buildx-cache-single + cache-to: type=local,dest=/tmp/.buildx-cache-single-new,mode=max + + - name: Move cache + run: | + rm -rf /tmp/.buildx-cache-single + mv /tmp/.buildx-cache-single-new /tmp/.buildx-cache-single + + summary: + needs: [extract-version, build-regular, build-single] + runs-on: ubuntu-latest + if: always() + steps: + - name: Build Summary + env: + GITHUB_EVENT_INPUTS_PUSH_LATEST_____FALSE_: ${{ github.event.inputs.push_latest || 'false' }} + ENV_GHCR_IMAGE: ${{ env.GHCR_IMAGE }} + ENV_DOCKERHUB_IMAGE: ${{ env.DOCKERHUB_IMAGE }} + GITHUB_EVENT_INPUTS_PUSH_LATEST: ${{ github.event.inputs.push_latest }} + run: | + echo "## Build Summary" >> $GITHUB_STEP_SUMMARY + echo "**Version:** ${{ needs.extract-version.outputs.version }}" >> $GITHUB_STEP_SUMMARY + echo "**Push v1-Latest:** "$GITHUB_EVENT_INPUTS_PUSH_LATEST_____FALSE_"" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "### Registries:" >> $GITHUB_STEP_SUMMARY + echo "βœ… **GHCR:** 
\`"$ENV_GHCR_IMAGE"\`" >> $GITHUB_STEP_SUMMARY + if [[ "${{ needs.extract-version.outputs.has_dockerhub_secrets }}" == "true" ]]; then + echo "βœ… **Docker Hub:** \`"$ENV_DOCKERHUB_IMAGE"\`" >> $GITHUB_STEP_SUMMARY + else + echo "⏭️ **Docker Hub:** Skipped (credentials not configured)" >> $GITHUB_STEP_SUMMARY + fi + echo "" >> $GITHUB_STEP_SUMMARY + echo "### Images Built:" >> $GITHUB_STEP_SUMMARY + + if [[ "${{ needs.build-regular.result }}" == "success" ]]; then + echo "βœ… **Regular (GHCR):** \`"$ENV_GHCR_IMAGE":${{ needs.extract-version.outputs.version }}\`" >> $GITHUB_STEP_SUMMARY + if [[ ""$GITHUB_EVENT_INPUTS_PUSH_LATEST"" == "true" ]]; then + echo "βœ… **Regular v1-Latest (GHCR):** \`"$ENV_GHCR_IMAGE":v1-latest\`" >> $GITHUB_STEP_SUMMARY + fi + if [[ "${{ needs.extract-version.outputs.has_dockerhub_secrets }}" == "true" ]]; then + echo "βœ… **Regular (Docker Hub):** \`"$ENV_DOCKERHUB_IMAGE":${{ needs.extract-version.outputs.version }}\`" >> $GITHUB_STEP_SUMMARY + if [[ ""$GITHUB_EVENT_INPUTS_PUSH_LATEST"" == "true" ]]; then + echo "βœ… **Regular v1-Latest (Docker Hub):** \`"$ENV_DOCKERHUB_IMAGE":v1-latest\`" >> $GITHUB_STEP_SUMMARY + fi + fi + elif [[ "${{ needs.build-regular.result }}" == "skipped" ]]; then + echo "⏭️ **Regular:** Skipped" >> $GITHUB_STEP_SUMMARY + else + echo "❌ **Regular:** Failed" >> $GITHUB_STEP_SUMMARY + fi + + if [[ "${{ needs.build-single.result }}" == "success" ]]; then + echo "βœ… **Single (GHCR):** \`"$ENV_GHCR_IMAGE":${{ needs.extract-version.outputs.version }}-single\`" >> $GITHUB_STEP_SUMMARY + if [[ ""$GITHUB_EVENT_INPUTS_PUSH_LATEST"" == "true" ]]; then + echo "βœ… **Single v1-Latest (GHCR):** \`"$ENV_GHCR_IMAGE":v1-latest-single\`" >> $GITHUB_STEP_SUMMARY + fi + if [[ "${{ needs.extract-version.outputs.has_dockerhub_secrets }}" == "true" ]]; then + echo "βœ… **Single (Docker Hub):** \`"$ENV_DOCKERHUB_IMAGE":${{ needs.extract-version.outputs.version }}-single\`" >> $GITHUB_STEP_SUMMARY + if [[ 
""$GITHUB_EVENT_INPUTS_PUSH_LATEST"" == "true" ]]; then + echo "βœ… **Single v1-Latest (Docker Hub):** \`"$ENV_DOCKERHUB_IMAGE":v1-latest-single\`" >> $GITHUB_STEP_SUMMARY + fi + fi + elif [[ "${{ needs.build-single.result }}" == "skipped" ]]; then + echo "⏭️ **Single:** Skipped" >> $GITHUB_STEP_SUMMARY + else + echo "❌ **Single:** Failed" >> $GITHUB_STEP_SUMMARY + fi + + echo "" >> $GITHUB_STEP_SUMMARY + echo "### Platforms:" >> $GITHUB_STEP_SUMMARY + echo "- linux/amd64" >> $GITHUB_STEP_SUMMARY + echo "- linux/arm64" >> $GITHUB_STEP_SUMMARY \ No newline at end of file diff --git a/.github/workflows/build-dev.yml b/.github/workflows/build-dev.yml new file mode 100644 index 0000000000000000000000000000000000000000..c483cc7672701129941f3a05403f829933ab2e78 --- /dev/null +++ b/.github/workflows/build-dev.yml @@ -0,0 +1,157 @@ +name: Development Build + +on: + pull_request: + branches: [ main ] + push: + branches: [ main ] + paths-ignore: + - '**.md' + - 'docs/**' + - 'notebooks/**' + - '.github/workflows/claude*.yml' + workflow_dispatch: + inputs: + dockerfile: + description: 'Dockerfile to test' + required: true + default: 'both' + type: choice + options: + - both + - regular + - single + platform: + description: 'Platform to build' + required: true + default: 'linux/amd64' + type: choice + options: + - linux/amd64 + - linux/arm64 + - linux/amd64,linux/arm64 + +env: + REGISTRY: docker.io + IMAGE_NAME: lfnovo/open_notebook + +jobs: + extract-version: + runs-on: ubuntu-latest + outputs: + version: ${{ steps.version.outputs.version }} + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Extract version from pyproject.toml + id: version + run: | + VERSION=$(grep -m1 '^version = ' pyproject.toml | cut -d'"' -f2) + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "Extracted version: $VERSION" + + test-build-regular: + needs: extract-version + runs-on: ubuntu-latest + if: github.event.inputs.dockerfile == 'regular' || github.event.inputs.dockerfile == 'both' 
|| github.event_name != 'workflow_dispatch' + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Cache Docker layers + uses: actions/cache@v3 + with: + path: /tmp/.buildx-cache-dev + key: ${{ runner.os }}-buildx-dev-regular-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx-dev-regular- + + - name: Build regular image (test only) + uses: docker/build-push-action@v5 + with: + context: . + file: ./Dockerfile + platforms: ${{ github.event.inputs.platform || 'linux/amd64' }} + push: false + tags: ${{ env.IMAGE_NAME }}:${{ needs.extract-version.outputs.version }}-dev-regular + cache-from: type=local,src=/tmp/.buildx-cache-dev + cache-to: type=local,dest=/tmp/.buildx-cache-dev-new,mode=max + + - name: Move cache + run: | + rm -rf /tmp/.buildx-cache-dev + mv /tmp/.buildx-cache-dev-new /tmp/.buildx-cache-dev + + test-build-single: + needs: extract-version + runs-on: ubuntu-latest + if: github.event.inputs.dockerfile == 'single' || github.event.inputs.dockerfile == 'both' || github.event_name != 'workflow_dispatch' + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Cache Docker layers + uses: actions/cache@v3 + with: + path: /tmp/.buildx-cache-dev-single + key: ${{ runner.os }}-buildx-dev-single-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx-dev-single- + + - name: Build single-container image (test only) + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./Dockerfile.single + platforms: ${{ github.event.inputs.platform || 'linux/amd64' }} + push: false + tags: ${{ env.IMAGE_NAME }}:${{ needs.extract-version.outputs.version }}-dev-single + cache-from: type=local,src=/tmp/.buildx-cache-dev-single + cache-to: type=local,dest=/tmp/.buildx-cache-dev-single-new,mode=max + + - name: Move cache + run: | + rm -rf /tmp/.buildx-cache-dev-single + mv /tmp/.buildx-cache-dev-single-new /tmp/.buildx-cache-dev-single + + summary: + needs: [extract-version, test-build-regular, test-build-single] + runs-on: ubuntu-latest + if: always() + steps: + - name: Development Build Summary + run: | + echo "## Development Build Summary" >> $GITHUB_STEP_SUMMARY + echo "**Version:** ${{ needs.extract-version.outputs.version }}" >> $GITHUB_STEP_SUMMARY + echo "**Platform:** ${{ github.event.inputs.platform || 'linux/amd64' }}" >> $GITHUB_STEP_SUMMARY + echo "**Dockerfile:** ${{ github.event.inputs.dockerfile || 'both' }}" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "### Results:" >> $GITHUB_STEP_SUMMARY + + if [[ "${{ needs.test-build-regular.result }}" == "success" ]]; then + echo "βœ… **Regular Dockerfile:** Build successful" >> $GITHUB_STEP_SUMMARY + elif [[ "${{ needs.test-build-regular.result }}" == "skipped" ]]; then + echo "⏭️ **Regular Dockerfile:** Skipped" >> $GITHUB_STEP_SUMMARY + else + echo "❌ **Regular Dockerfile:** Build failed" >> $GITHUB_STEP_SUMMARY + fi + + if [[ "${{ needs.test-build-single.result }}" == "success" ]]; then + echo "βœ… **Single Dockerfile:** Build successful" >> $GITHUB_STEP_SUMMARY + elif [[ "${{ needs.test-build-single.result }}" == "skipped" ]]; then + echo "⏭️ **Single Dockerfile:** Skipped" >> $GITHUB_STEP_SUMMARY + else + echo "❌ **Single Dockerfile:** Build failed" >> $GITHUB_STEP_SUMMARY + fi + + echo "" >> $GITHUB_STEP_SUMMARY + echo "### Notes:" >> $GITHUB_STEP_SUMMARY + echo "- This is a development build (no images pushed to registry)" >> $GITHUB_STEP_SUMMARY + echo "- 
For production releases, use the 'Build and Release' workflow" >> $GITHUB_STEP_SUMMARY \ No newline at end of file diff --git a/.github/workflows/claude-code-review.yml b/.github/workflows/claude-code-review.yml new file mode 100644 index 0000000000000000000000000000000000000000..ecd27d0a557725caddc8547ec8e92d2874e58f62 --- /dev/null +++ b/.github/workflows/claude-code-review.yml @@ -0,0 +1,75 @@ +name: Claude Code Review + +on: + pull_request: + types: [opened, synchronize] + # Optional: Only run on specific file changes + # paths: + # - "src/**/*.ts" + # - "src/**/*.tsx" + # - "src/**/*.js" + # - "src/**/*.jsx" + +jobs: + claude-review: + # Optional: Filter by PR author + # if: | + # github.event.pull_request.user.login == 'external-contributor' || + # github.event.pull_request.user.login == 'new-developer' || + # github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' + + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: read + issues: read + id-token: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - name: Run Claude Code Review + id: claude-review + uses: anthropics/claude-code-action@beta + with: + anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} + + # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4) + # model: "claude-opus-4-20250514" + + # Direct prompt for automated review (no @claude mention needed) + direct_prompt: | + Please review this pull request and provide feedback on: + - Code quality and best practices + - Potential bugs or issues + - Performance considerations + - Security concerns + - Test coverage + + Be constructive and helpful in your feedback. 
+ + # Optional: Customize review based on file types + # direct_prompt: | + # Review this PR focusing on: + # - For TypeScript files: Type safety and proper interface usage + # - For API endpoints: Security, input validation, and error handling + # - For React components: Performance, accessibility, and best practices + # - For tests: Coverage, edge cases, and test quality + + # Optional: Different prompts for different authors + # direct_prompt: | + # ${{ github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' && + # 'Welcome! Please review this PR from a first-time contributor. Be encouraging and provide detailed explanations for any suggestions.' || + # 'Please provide a thorough code review focusing on our coding standards and best practices.' }} + + # Optional: Add specific tools for running tests or linting + # allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)" + + # Optional: Skip review for certain conditions + # if: | + # !contains(github.event.pull_request.title, '[skip-review]') && + # !contains(github.event.pull_request.title, '[WIP]') + diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml new file mode 100644 index 0000000000000000000000000000000000000000..58d0fa2ec1e29ed237f01f259ed7f65520b20bbc --- /dev/null +++ b/.github/workflows/claude.yml @@ -0,0 +1,59 @@ +name: Claude Code + +on: + issue_comment: + types: [created] + pull_request_review_comment: + types: [created] + issues: + types: [opened, assigned] + pull_request_review: + types: [submitted] + +jobs: + claude: + if: | + (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) || + (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) || + (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) || + (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || 
contains(github.event.issue.title, '@claude'))) + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: read + issues: read + id-token: write + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - name: Run Claude Code + id: claude + uses: anthropics/claude-code-action@beta + with: + anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} + + # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4) + # model: "claude-opus-4-20250514" + + # Optional: Customize the trigger phrase (default: @claude) + # trigger_phrase: "/claude" + + # Optional: Trigger when specific user is assigned to an issue + # assignee_trigger: "claude-bot" + + # Optional: Allow Claude to run specific commands + # allowed_tools: "Bash(npm install),Bash(npm run build),Bash(npm run test:*),Bash(npm run lint:*)" + + # Optional: Add custom instructions for Claude to customize its behavior for your project + # custom_instructions: | + # Follow our coding standards + # Ensure all new code has tests + # Use TypeScript for new files + + # Optional: Custom environment variables for Claude + # claude_env: | + # NODE_ENV: test + diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..3ab53da6082be43f0ae76de596165957b2f9b3e5 --- /dev/null +++ b/.gitignore @@ -0,0 +1,137 @@ +.env +prompts/patterns/user/ +/notebooks/ +data/ +.uploads/ +sqlite-db/ +surreal-data/ +surreal_data/ +docker.env +!setup_guide/docker.env +notebook_data/ +# Python-specific +*.py[cod] +__pycache__/ +*.so +todo.md +temp/ +google-credentials.json +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +/lib/ +/lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ 
+.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# PyCharm +.idea/ + +# VS Code +.vscode/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# macOS +.DS_Store + +# Windows +Thumbs.db +ehthumbs.db +desktop.ini + +# Linux +*~ + +# Log files +*.log + +# Database files +*.db +*.sqlite3 + +.quarentena + +claude-logs/ +.claude/sessions +**/claude-logs + + +docs/custom_gpt +doc_exports/ + +specs/ +.claude + +.playwright-mcp/ \ No newline at end of file diff --git a/.python-version b/.python-version new file mode 100644 index 0000000000000000000000000000000000000000..e4fba2183587225f216eeada4c78dfab6b2e65f5 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.12 diff --git a/.railwayignore b/.railwayignore new file mode 100644 index 0000000000000000000000000000000000000000..175a764e6b165b929f62aace0e2f68bfeb470350 --- /dev/null +++ b/.railwayignore @@ -0,0 +1,75 @@ +# Railway Build Optimization - Ignore unnecessary files + +# Development +.git/ +.github/ +.vscode/ +*.md +!RAILWAY.md +!README.md +docs/ +.env.example +.env.railway +.gitignore + +# Data directories (will be empty in repo anyway) +data/ +notebook_data/ +surreal_data/ +surreal_single_data/ + +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +*.egg-info/ +.pytest_cache/ +.mypy_cache/ +.ruff_cache/ + +# Node +node_modules/ +.next/ +.turbo/ +out/ +build/ +dist/ + +# IDEs +.idea/ +*.swp +*.swo +*~ +.DS_Store + +# Scripts not needed in production +start-dev.ps1 +start-production.ps1 +stop-services.ps1 +diagnose.ps1 +Makefile + +# 
Alternative docker files +Dockerfile +Dockerfile.single +docker-compose*.yml +supervisord.conf +supervisord.single.conf + +# Tests +tests/ +*.test.ts +*.test.js +*.spec.ts +*.spec.js + +# CI/CD +.gitlab-ci.yml + +# Logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* diff --git a/.worktreeinclude b/.worktreeinclude new file mode 100644 index 0000000000000000000000000000000000000000..56e4e870aac0d17cd4eabfa16701fd37b5d718a8 --- /dev/null +++ b/.worktreeinclude @@ -0,0 +1,5 @@ +.env +.env.local +.env.* +**/.claude/settings.local.json +CLAUDE.local.md diff --git a/DEPLOY_NOW.md b/DEPLOY_NOW.md new file mode 100644 index 0000000000000000000000000000000000000000..d0dac330e51ed9828290116ae48d99bb35012ef6 --- /dev/null +++ b/DEPLOY_NOW.md @@ -0,0 +1,201 @@ +# 🎯 DEPLOY NOW - Everything Ready! + +## βœ… What's Configured + +I've set up everything to work exactly like your localhost, using your FREE API keys: + +- βœ… **Groq API**: `gsk_3pLc...kvfC` - For chat, transformations, insights +- βœ… **Gemini API**: `AIzaS...ep_0` - For embeddings, search, long context +- βœ… **Database**: test namespace/database (same as localhost) +- βœ… **All settings**: Same retry/worker config as your working setup + +--- + +## πŸš€ STEP 1: Copy Railway Variables + +Go to Railway Dashboard β†’ Your Service β†’ Variables, and paste ALL of these: + +```plaintext +SURREAL_URL=ws://127.0.0.1:8000/rpc +SURREAL_USER=root +SURREAL_PASSWORD=root +SURREAL_NAMESPACE=test +SURREAL_DATABASE=test +INTERNAL_API_URL=http://127.0.0.1:5055 +API_URL=http://localhost:5055 +SURREAL_COMMANDS_MAX_TASKS=5 +SURREAL_COMMANDS_RETRY_ENABLED=true +SURREAL_COMMANDS_RETRY_MAX_ATTEMPTS=3 +SURREAL_COMMANDS_RETRY_WAIT_STRATEGY=exponential_jitter +SURREAL_COMMANDS_RETRY_WAIT_MIN=1 +SURREAL_COMMANDS_RETRY_WAIT_MAX=30 +GROQ_API_KEY= +GOOGLE_API_KEY= +``` + +**Note**: We'll update `API_URL` after first deploy. 
+ +--- + +## πŸš€ STEP 2: Push Code + +```powershell +cd c:\sem6-real\studyrocket\notebookllm\open-notebook +git add . +git commit -m "Add Railway deployment with FREE tier (Groq + Gemini)" +git push origin main +``` + +--- + +## πŸš€ STEP 3: Wait for Deploy + +Railway will: +1. Build the Docker image (~5-10 minutes) +2. Start all services (SurrealDB, API, Worker, Frontend) +3. Run migrations (0 β†’ 18) +4. Expose your app on a public URL + +**Watch the logs** in Railway dashboard for: +``` +βœ“ Ready in XXXms +INFO: Application startup complete +Migrations completed successfully. Database is now at version 18 +``` + +--- + +## πŸš€ STEP 4: Update API_URL + +1. Find your Railway domain in the dashboard (e.g., `https://se-production-1a2b.up.railway.app`) +2. Update the `API_URL` variable: + ```plaintext + API_URL=https://se-production-1a2b.up.railway.app + ``` +3. Railway will auto-redeploy (~1 minute) + +--- + +## βœ… STEP 5: Test Everything + +Visit your app: `https://your-railway-domain.up.railway.app` + +**Test these features:** +- βœ… Create a notebook +- βœ… Upload a document (tests embeddings) +- βœ… Search documents (tests Gemini embeddings) +- βœ… Chat with documents (tests Groq LLM) +- βœ… Generate insights (tests transformations) +- βœ… Create notes + +**Skip for now:** +- ⏸️ Podcast generation (you'll configure later) + +--- + +## πŸŽ‰ What You'll Have + +### Working Features (FREE): +- βœ… Chat using Groq Llama 3.1 70B +- βœ… Document embeddings using Gemini +- βœ… Semantic search using Gemini +- βœ… Transformations using Groq +- βœ… Insights using Groq +- βœ… Long context (1M tokens!) 
using Gemini +- βœ… All for **$0/month** (AI costs) + +### Railway Costs: +- First month: **FREE** ($5 credit) +- After: **$5-10/month** (just hosting) + +--- + +## πŸ”§ Models Available + +In the UI, you can select from: + +### Groq Models (FREE): +- `llama-3.1-70b-versatile` - Best for complex tasks +- `llama-3.1-8b-instant` - Fast for simple tasks +- `mixtral-8x7b-32768` - Alternative option + +### Gemini Models (FREE): +- `gemini-1.5-flash` - Fast, FREE +- `gemini-1.5-pro` - 1M context, FREE tier +- `text-embedding-004` - Embeddings + +--- + +## πŸ†˜ If Something Goes Wrong + +### Build Fails +β†’ Check Railway logs for error message +β†’ Ensure all files are committed (especially migrations/18.surrealql) + +### Services Won't Start +β†’ Check `SURREAL_URL=ws://127.0.0.1:8000/rpc` (not localhost!) +β†’ Verify both API keys are set correctly + +### Can't Access App +β†’ Wait 2-3 minutes after deploy +β†’ Check `API_URL` is set to your Railway domain +β†’ Try incognito/private browser window + +### Features Don't Work +β†’ Groq models: Check chat works in UI +β†’ Gemini embeddings: Try uploading a document +β†’ If API key issues: Regenerate keys at provider dashboards + +--- + +## πŸ“Š Your Setup Summary + +| Component | Configuration | Status | +|-----------|--------------|--------| +| **Database** | SurrealDB (embedded) | βœ… Ready | +| **API** | FastAPI on port 5055 | βœ… Ready | +| **Frontend** | Next.js on port 8080 | βœ… Ready | +| **Worker** | Background tasks | βœ… Ready | +| **LLM** | Groq Llama 3.1 | βœ… FREE | +| **Embeddings** | Gemini | βœ… FREE | +| **Hosting** | Railway | βœ… $5-10/mo | +| **Podcasts** | Not configured | ⏸️ Later | + +--- + +## 🎊 Next Steps After Deploy + +1. βœ… Test all features (except podcasts) +2. βœ… Upload some test documents +3. βœ… Try searching and chatting +4. βœ… Generate some insights +5. 
⏸️ Configure podcasts later when needed + +--- + +## πŸ’° Cost Tracking + +Track your FREE tier usage: +- **Groq**: https://console.groq.com/dashboard +- **Gemini**: https://console.cloud.google.com/apis/dashboard +- **Railway**: https://railway.app/dashboard + +All providers show FREE tier limits and usage! + +--- + +## πŸš€ Ready to Deploy! + +Everything is configured. Just run: + +```powershell +git add . +git commit -m "Railway deployment ready with FREE tier keys" +git push origin main +``` + +Then watch Railway build and deploy! πŸŽ‰ + +--- + +**Questions?** Everything should work exactly like your localhost setup, but on Railway! The same models, same features (minus podcasts), all working with your FREE API keys. diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..f4c26f1ddf85a67e94ff5c73234b33224aeecdb9 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,66 @@ +FROM python:3.11-slim + +WORKDIR /app + +# Set PYTHONPATH to include /app +ENV PYTHONPATH=/app + +# Set Hugging Face cache directories (writable in HF Spaces) +ENV HF_HOME=/tmp +ENV TRANSFORMERS_CACHE=/tmp +ENV SENTENCE_TRANSFORMERS_HOME=/tmp + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + curl \ + build-essential \ + && rm -rf /var/lib/apt/lists/* + +# Install SurrealDB +RUN curl -sSf https://install.surrealdb.com | sh + +# Copy requirements.txt for dependency installation +COPY requirements.txt ./ + +# Install Python dependencies from requirements.txt +RUN pip install --no-cache-dir --upgrade pip && \ + pip install --no-cache-dir -r requirements.txt + +# Explicitly ensure surreal-commands is installed (belt-and-suspenders approach) +RUN pip install --no-cache-dir surreal-commands>=1.2.0 + +# Pre-download sentence-transformers model at build time +# This will be cached in the Docker image +RUN python -c "from sentence_transformers import SentenceTransformer; SentenceTransformer('all-MiniLM-L6-v2')" + +# Copy application code 
+COPY api/ ./api/ +COPY open_notebook/ ./open_notebook/ +COPY commands/ ./commands/ +COPY migrations/ ./migrations/ +COPY prompts/ ./prompts/ +COPY run_api.py ./ +COPY start.sh ./ + +# Make start script executable +RUN chmod +x start.sh + +# Set environment variables for SurrealDB connection +ENV SURREAL_URL=ws://localhost:8000/rpc +ENV SURREAL_ADDRESS=localhost +ENV SURREAL_PORT=8000 +ENV SURREAL_USER=root +ENV SURREAL_PASS=root +ENV SURREAL_NAMESPACE=open_notebook +ENV SURREAL_DATABASE=main + +# Set API configuration for Hugging Face Spaces +ENV API_HOST=0.0.0.0 +ENV API_PORT=7860 +ENV API_RELOAD=false + +# Expose Hugging Face Spaces port +EXPOSE 7860 + +# Run the start script +CMD ["./start.sh"] diff --git a/Dockerfile.huggingface b/Dockerfile.huggingface new file mode 100644 index 0000000000000000000000000000000000000000..f4c26f1ddf85a67e94ff5c73234b33224aeecdb9 --- /dev/null +++ b/Dockerfile.huggingface @@ -0,0 +1,66 @@ +FROM python:3.11-slim + +WORKDIR /app + +# Set PYTHONPATH to include /app +ENV PYTHONPATH=/app + +# Set Hugging Face cache directories (writable in HF Spaces) +ENV HF_HOME=/tmp +ENV TRANSFORMERS_CACHE=/tmp +ENV SENTENCE_TRANSFORMERS_HOME=/tmp + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + curl \ + build-essential \ + && rm -rf /var/lib/apt/lists/* + +# Install SurrealDB +RUN curl -sSf https://install.surrealdb.com | sh + +# Copy requirements.txt for dependency installation +COPY requirements.txt ./ + +# Install Python dependencies from requirements.txt +RUN pip install --no-cache-dir --upgrade pip && \ + pip install --no-cache-dir -r requirements.txt + +# Explicitly ensure surreal-commands is installed (belt-and-suspenders approach) +RUN pip install --no-cache-dir surreal-commands>=1.2.0 + +# Pre-download sentence-transformers model at build time +# This will be cached in the Docker image +RUN python -c "from sentence_transformers import SentenceTransformer; SentenceTransformer('all-MiniLM-L6-v2')" + +# Copy 
application code +COPY api/ ./api/ +COPY open_notebook/ ./open_notebook/ +COPY commands/ ./commands/ +COPY migrations/ ./migrations/ +COPY prompts/ ./prompts/ +COPY run_api.py ./ +COPY start.sh ./ + +# Make start script executable +RUN chmod +x start.sh + +# Set environment variables for SurrealDB connection +ENV SURREAL_URL=ws://localhost:8000/rpc +ENV SURREAL_ADDRESS=localhost +ENV SURREAL_PORT=8000 +ENV SURREAL_USER=root +ENV SURREAL_PASS=root +ENV SURREAL_NAMESPACE=open_notebook +ENV SURREAL_DATABASE=main + +# Set API configuration for Hugging Face Spaces +ENV API_HOST=0.0.0.0 +ENV API_PORT=7860 +ENV API_RELOAD=false + +# Expose Hugging Face Spaces port +EXPOSE 7860 + +# Run the start script +CMD ["./start.sh"] diff --git a/Dockerfile.railway b/Dockerfile.railway new file mode 100644 index 0000000000000000000000000000000000000000..42aed73dd416c55aabee2eb3b26db86d77e47a0e --- /dev/null +++ b/Dockerfile.railway @@ -0,0 +1,80 @@ +# Railway-optimized Dockerfile for Open Notebook +# Uses single-container architecture with all services + +# Build stage +FROM python:3.12-slim-bookworm AS builder + +# Install uv +COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ + +# Install build dependencies +RUN apt-get update && apt-get upgrade -y && apt-get install -y \ + gcc g++ git make curl \ + && curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \ + && apt-get install -y nodejs \ + && rm -rf /var/lib/apt/lists/* + +# Build optimization +ENV MAKEFLAGS="-j$(nproc)" \ + PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy + +WORKDIR /app + +# Install Python dependencies +COPY pyproject.toml uv.lock ./ +COPY open_notebook/__init__.py ./open_notebook/__init__.py +RUN uv sync --frozen --no-dev + +# Copy application code +COPY . 
/app + +# Build frontend +WORKDIR /app/frontend +RUN npm ci && npm run build + +WORKDIR /app + +# Runtime stage +FROM python:3.12-slim-bookworm AS runtime + +# Install runtime dependencies +RUN apt-get update && apt-get upgrade -y && apt-get install -y \ + ffmpeg supervisor curl \ + && curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \ + && apt-get install -y nodejs \ + && rm -rf /var/lib/apt/lists/* + +# Install SurrealDB +RUN curl --proto '=https' --tlsv1.2 -sSf https://install.surrealdb.com | sh + +# Install uv +COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ + +WORKDIR /app + +# Copy from builder +COPY --from=builder /app/.venv /app/.venv +COPY --from=builder /app /app + +# Set environment +ENV PORT=8080 \ + PYTHONUNBUFFERED=1 + +# Expose ports +EXPOSE 8080 5055 + +# Create directories +RUN mkdir -p /mydata /app/data /var/log/supervisor + +# Fix script permissions +RUN sed -i 's/\r$//' /app/scripts/wait-for-api.sh && \ + chmod +x /app/scripts/wait-for-api.sh + +# Copy supervisord config +COPY supervisord.railway.conf /app/supervisord.conf + +# Start supervisord +CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"] diff --git a/Dockerfile.single b/Dockerfile.single new file mode 100644 index 0000000000000000000000000000000000000000..559c608d5ecb5844e80db8b93135d63399bbe52b --- /dev/null +++ b/Dockerfile.single @@ -0,0 +1,98 @@ +# Build stage +FROM python:3.12-slim-bookworm AS builder + +# Install uv using the official method +COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ + +# Install system dependencies required for building certain Python packages +# Add Node.js 20.x LTS for building frontend +RUN apt-get update && apt-get upgrade -y && apt-get install -y \ + gcc g++ git make \ + curl \ + && curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \ + && apt-get install -y nodejs \ + && rm -rf /var/lib/apt/lists/* + +# Set build optimization environment variables +ENV MAKEFLAGS="-j$(nproc)" +ENV PYTHONDONTWRITEBYTECODE=1 +ENV 
PYTHONUNBUFFERED=1 +ENV UV_COMPILE_BYTECODE=1 +ENV UV_LINK_MODE=copy + +# Set the working directory in the container to /app +WORKDIR /app + +# Copy dependency files and minimal package structure first for better layer caching +COPY pyproject.toml uv.lock ./ +COPY open_notebook/__init__.py ./open_notebook/__init__.py + +# Install dependencies with optimizations (this layer will be cached unless dependencies change) +RUN uv sync --frozen --no-dev + +# Copy the rest of the application code +COPY . /app + +# Install frontend dependencies and build +WORKDIR /app/frontend +RUN npm ci +RUN npm run build + +# Return to app root +WORKDIR /app + +# Runtime stage +FROM python:3.12-slim-bookworm AS runtime + +# Install runtime system dependencies including curl for SurrealDB installation +# Add Node.js 20.x LTS for running frontend +RUN apt-get update && apt-get upgrade -y && apt-get install -y \ + ffmpeg \ + supervisor \ + curl \ + && curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \ + && apt-get install -y nodejs \ + && rm -rf /var/lib/apt/lists/* + +# Install SurrealDB +RUN curl --proto '=https' --tlsv1.2 -sSf https://install.surrealdb.com | sh + +# Install uv using the official method +COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ + +# Set the working directory in the container to /app +WORKDIR /app + +# Copy the virtual environment from builder stage +COPY --from=builder /app/.venv /app/.venv + +# Copy the application code +COPY --from=builder /app /app + +# Default PORT if not provided +ENV PORT=8502 + +# Expose ports for Frontend and API +EXPOSE 8502 5055 + +# Copy single-container supervisord configuration +COPY supervisord.single.conf /app/supervisord.conf + +# Create log directories +RUN mkdir -p /var/log/supervisor + +# Fix line endings for startup script +RUN sed -i 's/\r$//' /app/scripts/wait-for-api.sh +RUN chmod +x /app/scripts/wait-for-api.sh + +# Runtime API URL Configuration +# The API_URL environment variable can be set at container 
runtime to configure +# where the frontend should connect to the API. This allows the same Docker image +# to work in different deployment scenarios without rebuilding. +# +# If not set, the system will auto-detect based on incoming requests. +# Set API_URL when using reverse proxies or custom domains. +# +# Example: docker run -e API_URL=https://your-domain.com/api ... + +CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"] \ No newline at end of file diff --git a/FINAL_RAILWAY_VARS.md b/FINAL_RAILWAY_VARS.md new file mode 100644 index 0000000000000000000000000000000000000000..243da337dd198dfe54b9ac023993dd17e4f8e057 --- /dev/null +++ b/FINAL_RAILWAY_VARS.md @@ -0,0 +1,59 @@ +# πŸš€ FINAL RAILWAY VARIABLES - Ready to Deploy + +## Copy ALL of these to Railway Dashboard β†’ Variables + +```plaintext +SURREAL_URL=ws://127.0.0.1:8000/rpc +SURREAL_USER=root +SURREAL_PASSWORD=root +SURREAL_NAMESPACE=test +SURREAL_DATABASE=test +INTERNAL_API_URL=http://127.0.0.1:5055 +API_URL=https://YOUR_RAILWAY_DOMAIN_HERE +SURREAL_COMMANDS_MAX_TASKS=5 +SURREAL_COMMANDS_RETRY_ENABLED=true +SURREAL_COMMANDS_RETRY_MAX_ATTEMPTS=3 +SURREAL_COMMANDS_RETRY_WAIT_STRATEGY=exponential_jitter +SURREAL_COMMANDS_RETRY_WAIT_MIN=1 +SURREAL_COMMANDS_RETRY_WAIT_MAX=30 +GROQ_API_KEY= +GOOGLE_API_KEY= +``` + +## After First Deploy + +Once you get your Railway domain (like `https://se-production-xxxx.up.railway.app`): + +```plaintext +API_URL=https://your-actual-railway-domain.up.railway.app +``` + +--- + +## βœ… What Will Work + +With these keys, these features will work: + +- βœ… **Chat** - Using Groq Llama models +- βœ… **Transformations** - Using Groq Llama models +- βœ… **Embeddings/Search** - Using Gemini embeddings +- βœ… **Long Context** - Using Gemini 1.5 Pro (1M tokens!) 
+- βœ… **Insights** - Using Groq models +- βœ… **Knowledge Graph** - Using embeddings +- βœ… **Document Upload** - Using embeddings for vectorization + +## ⏸️ What You'll Set Up Later + +- ⏸️ **Podcasts** - You'll configure this later (needs TTS setup) + +--- + +## πŸš€ Deploy Now + +```powershell +git add . +git commit -m "Add FREE tier config with Groq + Gemini API keys" +git push origin main +``` + +Railway will auto-deploy and everything except podcasts will work! πŸŽ‰ diff --git a/FREE_TIER_QUICK.md b/FREE_TIER_QUICK.md new file mode 100644 index 0000000000000000000000000000000000000000..f8a84222f14604e3c7bfacf2d14e5e8f1702d0c8 --- /dev/null +++ b/FREE_TIER_QUICK.md @@ -0,0 +1,104 @@ +# πŸ†“ FREE TIER - Railway Variables (Copy-Paste Ready) + +## ⚑ QUICK SETUP + +### Step 1: Get FREE API Keys + +1. **Groq** (FREE LLM): https://console.groq.com/keys +2. **Gemini** (FREE Embeddings/TTS): https://makersuite.google.com/app/apikey + +### Step 2: Set These in Railway Variables + +```plaintext +SURREAL_URL=ws://127.0.0.1:8000/rpc +SURREAL_USER=root +SURREAL_PASSWORD=root +SURREAL_NAMESPACE=test +SURREAL_DATABASE=test +INTERNAL_API_URL=http://127.0.0.1:5055 +API_URL=https://YOUR_RAILWAY_DOMAIN_HERE +SURREAL_COMMANDS_MAX_TASKS=5 +SURREAL_COMMANDS_RETRY_ENABLED=true +SURREAL_COMMANDS_RETRY_MAX_ATTEMPTS=3 +SURREAL_COMMANDS_RETRY_WAIT_STRATEGY=exponential_jitter +SURREAL_COMMANDS_RETRY_WAIT_MIN=1 +SURREAL_COMMANDS_RETRY_WAIT_MAX=30 +GROQ_API_KEY=paste_your_groq_key_here +GOOGLE_API_KEY=paste_your_gemini_key_here +``` + +### Step 3: Push Code + +```powershell +git add . +git commit -m "Switch to 100% free tier models (Groq + Gemini)" +git push origin main +``` + +### Step 4: After Deploy + +Update `API_URL` with your actual Railway domain. + +--- + +## 🎯 WHAT CHANGED + +### Podcast Models (Migration 18) + +**Before (BROKEN & PAID):** +- Outline: `openai/gpt-5-mini` ← Model doesn't exist! +- Transcript: `openai/gpt-5-mini` ← Model doesn't exist! 
+- TTS: `openai/gpt-4o-mini-tts` ← Model doesn't exist! +- **Cost**: Would fail + $15-30/month if fixed + +**After (FREE & WORKING):** +- Outline: `groq/llama-3.1-8b-instant` ← Fast, FREE +- Transcript: `groq/llama-3.1-70b-versatile` ← Smart, FREE +- TTS: `google/gemini-1.5-flash` ← FREE +- **Cost**: $0/month! + +--- + +## πŸ“Š FREE TIER LIMITS + +| Provider | Free Limit | Good For | +|----------|-----------|----------| +| **Groq** | 30 req/min
<br>~7000 req/day | Chat, Transformations, Podcasts |
<br>1500 req/day | Embeddings, Long Context, TTS |
**Groq** - BEST for FREE LLM + - βœ… Completely FREE (generous rate limits) + - βœ… Very fast inference + - βœ… Models: `llama-3.1-70b-versatile`, `llama-3.1-8b-instant`, `mixtral-8x7b-32768` + - βœ… No credit card required + +2. **Google Gemini** - BEST for embeddings & long context + - βœ… FREE tier: 60 requests/minute + - βœ… Models: `gemini-1.5-flash`, `gemini-1.5-pro` (1M context!) + - βœ… Embeddings included FREE + - βœ… No credit card required initially + +3. **OpenAI** - NOT FREE (but you have credit) + - ❌ Requires payment (but $5-18 free credit for new accounts) + - ⚠️ `gpt-4o-mini` costs $0.15/1M input tokens, $0.60/1M output + - ⚠️ TTS costs extra + +--- + +## πŸ“‹ RAILWAY VARIABLES - 100% FREE CONFIGURATION + +Copy these EXACT variables to your Railway dashboard: + +```bash +# ============================================ +# DATABASE (Keep as-is) +# ============================================ +SURREAL_URL=ws://127.0.0.1:8000/rpc +SURREAL_USER=root +SURREAL_PASSWORD=root +SURREAL_NAMESPACE=test +SURREAL_DATABASE=test + +# ============================================ +# API CONFIGURATION +# ============================================ +INTERNAL_API_URL=http://127.0.0.1:5055 +API_URL=https://YOUR_RAILWAY_DOMAIN_HERE + +# ============================================ +# WORKER & RETRY (Keep as-is) +# ============================================ +SURREAL_COMMANDS_MAX_TASKS=5 +SURREAL_COMMANDS_RETRY_ENABLED=true +SURREAL_COMMANDS_RETRY_MAX_ATTEMPTS=3 +SURREAL_COMMANDS_RETRY_WAIT_STRATEGY=exponential_jitter +SURREAL_COMMANDS_RETRY_WAIT_MIN=1 +SURREAL_COMMANDS_RETRY_WAIT_MAX=30 + +# ============================================ +# FREE TIER AI PROVIDERS +# ============================================ + +# Groq - FREE (Best for LLMs - Chat, Transformations) +# Get FREE key at: https://console.groq.com/keys +GROQ_API_KEY=your_groq_api_key_here + +# Google Gemini - FREE (Best for Embeddings & Long Context) +# Get FREE key at: 
https://makersuite.google.com/app/apikey +GOOGLE_API_KEY=your_google_gemini_key_here + +# ============================================ +# OPTIONAL: If you have OpenAI credit +# ============================================ +# OPENAI_API_KEY=sk-your_key_if_you_have_credit + +# ============================================ +# DO NOT SET - These are paid services +# ============================================ +# ANTHROPIC_API_KEY= # Claude - PAID +# ELEVENLABS_API_KEY= # TTS - PAID +# MISTRAL_API_KEY= # Mistral - PAID +# DEEPSEEK_API_KEY= # DeepSeek - PAID +``` + +--- + +## βš™οΈ CODE CHANGES REQUIRED + +### Fix Migration 7 (Podcast Models) + +Your migration file has `gpt-5-mini` which doesn't exist. You need to change it to use **FREE Groq models**: + +**File**: `migrations/7.surrealql` + +**Change these lines:** + +```sql +-- BEFORE (Uses paid OpenAI): +outline_provider: "openai", +outline_model: "gpt-5-mini", # ← This is wrong (gpt-5 doesn't exist) +transcript_provider: "openai", +transcript_model: "gpt-5-mini", + +-- AFTER (Uses FREE Groq): +outline_provider: "groq", +outline_model: "llama-3.1-8b-instant", # ← Fast & FREE +transcript_provider: "groq", +transcript_model: "llama-3.1-70b-versatile", # ← Smart & FREE +``` + +**All 3 episode profiles need this change:** +1. `tech_discussion` +2. `solo_expert` +3. `business_analysis` + +--- + +## 🎀 TTS (Text-to-Speech) Problem + +**Issue**: Your migrations use `gpt-4o-mini-tts` which is **NOT FREE** and **DOESN'T EXIST** as a model name. + +OpenAI TTS models are: +- `tts-1` (costs $15/1M characters) +- `tts-1-hd` (costs $30/1M characters) + +### FREE TTS Options: + +1. **Google Gemini MultiModal** (BEST FREE OPTION) + - Use `gemini-1.5-flash` for audio generation + - FREE tier included + +2. **Disable TTS** (if you don't need podcasts) + - Remove podcast functionality to stay 100% free + +3. 
**Keep OpenAI TTS** (if you have credit) + - Will use your free credit (~500K-1M characters) + +### Recommended: Change to Google TTS (FREE) + +**File**: `migrations/7.surrealql` + +```sql +-- BEFORE (Paid OpenAI TTS): +tts_provider: "openai", +tts_model: "gpt-4o-mini-tts", # ← Doesn't exist, costs money + +-- AFTER (FREE Google TTS): +tts_provider: "google", +tts_model: "gemini-1.5-flash", # ← FREE +``` + +--- + +## πŸ”§ EXACT CHANGES TO MAKE + +### Step 1: Update Migration File + +**File**: `c:\sem6-real\studyrocket\notebookllm\open-notebook\migrations\18.surrealql` (create NEW migration) + +```sql +-- Migration 18: Switch to FREE tier models (Groq + Gemini) + +-- Update all episode profiles to use FREE Groq models +UPDATE episode_profile:tech_discussion SET + outline_provider = "groq", + outline_model = "llama-3.1-8b-instant", + transcript_provider = "groq", + transcript_model = "llama-3.1-70b-versatile"; + +UPDATE episode_profile:solo_expert SET + outline_provider = "groq", + outline_model = "llama-3.1-8b-instant", + transcript_provider = "groq", + transcript_model = "llama-3.1-70b-versatile"; + +UPDATE episode_profile:business_analysis SET + outline_provider = "groq", + outline_model = "llama-3.1-8b-instant", + transcript_provider = "groq", + transcript_model = "llama-3.1-70b-versatile"; + +-- Update all speaker profiles to use FREE Google TTS +UPDATE speaker_profile:tech_experts SET + tts_provider = "google", + tts_model = "gemini-1.5-flash"; + +UPDATE speaker_profile:solo_expert SET + tts_provider = "google", + tts_model = "gemini-1.5-flash"; + +UPDATE speaker_profile:business_panel SET + tts_provider = "google", + tts_model = "gemini-1.5-flash"; +``` + +**File**: `c:\sem6-real\studyrocket\notebookllm\open-notebook\migrations\18_down.surrealql` + +```sql +-- Migration 18 Down: Revert to original OpenAI models + +UPDATE episode_profile:tech_discussion SET + outline_provider = "openai", + outline_model = "gpt-4o-mini", + transcript_provider = "openai", + 
transcript_model = "gpt-4o-mini"; + +UPDATE episode_profile:solo_expert SET + outline_provider = "openai", + outline_model = "gpt-4o-mini", + transcript_provider = "openai", + transcript_model = "gpt-4o-mini"; + +UPDATE episode_profile:business_analysis SET + outline_provider = "openai", + outline_model = "gpt-4o-mini", + transcript_provider = "openai", + transcript_model = "gpt-4o-mini"; + +UPDATE speaker_profile:tech_experts SET + tts_provider = "openai", + tts_model = "tts-1"; + +UPDATE speaker_profile:solo_expert SET + tts_provider = "openai", + tts_model = "tts-1"; + +UPDATE speaker_profile:business_panel SET + tts_provider = "openai", + tts_model = "tts-1"; +``` + +### Step 2: Register Migration 18 + +**File**: `open_notebook/database/async_migrate.py` + +Add migration 18 to the list (after line with migration 17): + +```python +AsyncMigration.from_file("migrations/18.surrealql"), + +# In down_migrations: +AsyncMigration.from_file("migrations/18_down.surrealql"), +``` + +--- + +## πŸ“Š FREE TIER LIMITS + +### Groq (LLM) +- **Rate Limit**: 30 requests/minute +- **Daily**: Generous (thousands of requests) +- **Models**: Llama 3.1 70B, Llama 3.1 8B, Mixtral +- **Context**: 8K-128K tokens depending on model +- **Cost**: $0 (100% FREE) + +### Google Gemini (Embeddings + LLM + TTS) +- **Rate Limit**: 60 requests/minute (FREE tier) +- **Daily**: 1,500 requests/day (FREE tier) +- **Models**: Gemini 1.5 Flash, Gemini 1.5 Pro +- **Context**: Up to 1 MILLION tokens! 
+- **Cost**: $0 (FREE tier, then pay-as-you-go) + +### Railway Hosting +- **Free**: $5 credit/month (hobby plan) +- **Usage**: ~$5-10/month for this app +- **Result**: First month FREE, then ~$5-10/month + +--- + +## 🎯 MODEL USAGE BY FEATURE + +Based on my analysis, here's what each feature uses: + +| Feature | Current Model | FREE Alternative | +|---------|--------------|------------------| +| **Chat** | User-selected | Groq: `llama-3.1-70b-versatile` | +| **Transformations** | User-selected | Groq: `llama-3.1-70b-versatile` | +| **Embeddings** | User-selected | Gemini: `text-embedding-004` | +| **Large Context** | User-selected | Gemini: `gemini-1.5-pro` (1M context!) | +| **Podcast Outline** | `gpt-5-mini` (broken) | Groq: `llama-3.1-8b-instant` | +| **Podcast Transcript** | `gpt-5-mini` (broken) | Groq: `llama-3.1-70b-versatile` | +| **TTS (Podcast Audio)** | `gpt-4o-mini-tts` (doesn't exist) | Google: `gemini-1.5-flash` | +| **Search** | Embeddings model | Gemini: `text-embedding-004` | +| **Insights** | Transformation model | Groq: `llama-3.1-70b-versatile` | + +--- + +## βœ… DEPLOYMENT CHECKLIST + +### Before Pushing Code: + +- [ ] Create `migrations/18.surrealql` (use FREE models) +- [ ] Create `migrations/18_down.surrealql` (rollback) +- [ ] Update `async_migrate.py` to include migration 18 +- [ ] Get FREE Groq API key from https://console.groq.com/keys +- [ ] Get FREE Gemini API key from https://makersuite.google.com/app/apikey + +### Railway Variables: + +- [ ] Set `GROQ_API_KEY` (your FREE key) +- [ ] Set `GOOGLE_API_KEY` (your FREE key) +- [ ] Set `SURREAL_URL=ws://127.0.0.1:8000/rpc` (not localhost!) +- [ ] Set `INTERNAL_API_URL=http://127.0.0.1:5055` +- [ ] Keep all retry/worker settings as-is +- [ ] **DO NOT** set `OPENAI_API_KEY` (unless you have credit) + +### After Deploy: + +- [ ] Check logs for "Migrations completed successfully. 
Database is now at version 18" +- [ ] Test chat with Groq models +- [ ] Test embeddings with Gemini +- [ ] Test podcast generation (if needed) +- [ ] Monitor FREE tier usage in Groq/Gemini dashboards + +--- + +## πŸ’° COST BREAKDOWN + +### Monthly Costs (FREE TIER): + +| Service | Cost | +|---------|------| +| **Groq LLM** | $0 (FREE) | +| **Gemini API** | $0 (FREE tier) | +| **Railway Hosting** | $5-10/month | +| **Domain** (optional) | $10-15/year | +| **Total** | **$5-10/month** | + +### If You Exceed FREE Tiers: + +- **Groq**: Still free (very generous limits) +- **Gemini**: $0.35 per 1M tokens (very cheap) +- **Worst case**: $10-20/month total + +--- + +## 🚨 WARNINGS + +1. **`gpt-5-mini` doesn't exist** - This will cause errors if OpenAI is called +2. **`gpt-4o-mini-tts` doesn't exist** - TTS will fail without migration +3. **Migration 18 is REQUIRED** - Old data uses broken model names +4. **Test locally first** - Run migrations on local DB before Railway + +--- + +## πŸŽ‰ BENEFITS OF FREE TIER SETUP + +βœ… **$0/month for AI** (only pay for Railway hosting) +βœ… **Fast inference** with Groq (faster than OpenAI!) +βœ… **1M token context** with Gemini (vs 128K for GPT-4) +βœ… **No credit card needed** for Groq/Gemini free tiers +βœ… **Scalable** - Upgrade to paid tiers if needed later + +--- + +## πŸ“ž SUPPORT + +Get your FREE API keys: +- πŸ”₯ **Groq**: https://console.groq.com/keys +- 🌟 **Gemini**: https://makersuite.google.com/app/apikey + +Questions? Check the main docs or Discord! diff --git a/FRONTEND_CONNECTION_GUIDE.md b/FRONTEND_CONNECTION_GUIDE.md new file mode 100644 index 0000000000000000000000000000000000000000..81a0112c5cac1ffd908c4b6526777236f66b7ff0 --- /dev/null +++ b/FRONTEND_CONNECTION_GUIDE.md @@ -0,0 +1,215 @@ +# πŸš€ Frontend Connection to Hugging Face Backend + +## βœ… Configuration Complete! + +Your Next.js frontend is now configured to connect to your Hugging Face Space backend. + +--- + +## πŸ“‹ Changes Made + +### 1. 
βœ… Frontend Environment Configuration + +**Created:** [`frontend/.env.local`](frontend/.env.local) + +```env +NEXT_PUBLIC_API_URL=https://baveshraam-open-notebook.hf.space +INTERNAL_API_URL=https://baveshraam-open-notebook.hf.space +``` + +This tells the frontend where to find your deployed backend API. + +### 2. βœ… API Client Already Configured + +**File:** [`frontend/src/lib/api/client.ts`](frontend/src/lib/api/client.ts) + +The API client already uses dynamic configuration: +- βœ… Reads from `process.env.NEXT_PUBLIC_API_URL` +- βœ… Falls back to auto-detection +- βœ… Ultimate fallback to `http://127.0.0.1:5055` + +**No changes needed!** The existing code will automatically use your `.env.local` settings. + +### 3. βœ… CORS Configuration Updated + +**File:** [`api/main.py`](api/main.py) + +Updated CORS to allow requests from: +- βœ… `http://localhost:3000` (local development) +- βœ… `http://127.0.0.1:3000` (local development) +- βœ… `https://baveshraam-open-notebook.hf.space` (your HF Space) +- βœ… `*` (wildcard - allows any origin) + +### 4. βœ… Hardcoded URLs Checked + +All hardcoded `localhost:5055` references in the frontend are: +- βœ… **Fallback defaults only** (when env vars not set) +- βœ… **Example text in error messages** (documentation) +- βœ… **No action needed** - proper fallback behavior + +--- + +## 🎯 Next Steps + +### Step 1: Deploy Backend Changes to Hugging Face + +The CORS update needs to be deployed to your Hugging Face Space: + +```bash +git add api/main.py +git commit -m "Update CORS for frontend connection" +git push +``` + +Wait for Hugging Face to rebuild (check the Space logs). + +### Step 2: Start Your Frontend Locally + +```bash +cd frontend +npm install # If not already done +npm run dev +``` + +Your frontend will start on `http://localhost:3000` and connect to: +- **Backend API:** `https://baveshraam-open-notebook.hf.space` + +### Step 3: Test the Connection + +1. **Open browser:** http://localhost:3000 +2. 
**Check browser console** for API connection messages +3. **Try creating a notebook** or any API-dependent feature +4. **Check Network tab** to verify requests go to your HF Space URL + +--- + +## πŸ” How It Works + +### Configuration Priority + +The frontend uses a smart fallback system: + +``` +1. Runtime config from /config endpoint (uses .env.local) + ↓ +2. Build-time NEXT_PUBLIC_API_URL + ↓ +3. Auto-detection from browser URL + ↓ +4. Fallback to http://127.0.0.1:5055 +``` + +### Environment Variables + +| Variable | Used By | Purpose | +|----------|---------|---------| +| `NEXT_PUBLIC_API_URL` | Browser | Client-side API calls | +| `INTERNAL_API_URL` | Next.js Server | Server-side proxying | + +### URL Structure + +The frontend automatically constructs API URLs: + +- Base URL: `https://baveshraam-open-notebook.hf.space` +- API Endpoint: `/api` (added automatically) +- Full API URL: `https://baveshraam-open-notebook.hf.space/api` + +--- + +## πŸ› οΈ Troubleshooting + +### Issue: "Failed to fetch" or CORS errors + +**Solution:** +1. Verify backend is running: https://baveshraam-open-notebook.hf.space/health +2. Check backend logs for CORS rejections +3. Ensure CORS changes were deployed (check git commit) + +### Issue: Frontend still connects to localhost + +**Solution:** +1. Verify `.env.local` file exists in `frontend/` directory +2. Restart Next.js dev server: `npm run dev` +3. Check browser console for config messages +4. Clear browser cache and reload + +### Issue: 404 errors on /api/* endpoints + +**Solution:** +1. Check that backend is running: https://baveshraam-open-notebook.hf.space/api/config +2. Verify the URL doesn't have double `/api/api/` +3. Check Next.js rewrite configuration in `next.config.ts` + +--- + +## πŸ“ Environment Files Reference + +### `.env.local` (active configuration) +Your current deployment settings. + +### `.env.local.example` (template) +Copy this when deploying to new environments. 
+ +### `.env.example` (backend configuration) +Backend environment variables (already configured on HF Space). + +--- + +## πŸŽ‰ Success Indicators + +You'll know the connection works when: + +1. βœ… Browser console shows: `βœ… [Config] Successfully loaded API config` +2. βœ… Network tab shows requests to `baveshraam-open-notebook.hf.space` +3. βœ… No CORS errors in browser console +4. βœ… Features like "Create Notebook" work correctly +5. βœ… Health check responds: https://baveshraam-open-notebook.hf.space/health + +--- + +## πŸš€ Deploy Frontend (Optional) + +When ready to deploy your frontend: + +### Vercel / Netlify +Add environment variables in dashboard: +``` +NEXT_PUBLIC_API_URL=https://baveshraam-open-notebook.hf.space +INTERNAL_API_URL=https://baveshraam-open-notebook.hf.space +``` + +### Docker +```bash +docker build -t open-notebook-frontend ./frontend +docker run -p 3000:3000 \ + -e NEXT_PUBLIC_API_URL=https://baveshraam-open-notebook.hf.space \ + -e INTERNAL_API_URL=https://baveshraam-open-notebook.hf.space \ + open-notebook-frontend +``` + +--- + +## πŸ“š Architecture Overview + +``` +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ Browser β”‚ +β”‚ localhost:3000 β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ NEXT_PUBLIC_API_URL + ↓ +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ Hugging Face Space Backend β”‚ +β”‚ baveshraam-open-notebook.hf.space β”‚ +β”‚ β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ FastAPI β”‚ ←──→ β”‚ SurrealDBβ”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ +``` + +--- + +## 🎊 You're All Set! + +Your frontend is now ready to connect to your Hugging Face deployed backend. 
Start the frontend with `npm run dev` and test away! diff --git a/HUGGINGFACE_DEPLOYMENT.md b/HUGGINGFACE_DEPLOYMENT.md new file mode 100644 index 0000000000000000000000000000000000000000..7868f7327a0c5ac957178214a258292a51406107 --- /dev/null +++ b/HUGGINGFACE_DEPLOYMENT.md @@ -0,0 +1,197 @@ +# Hugging Face Spaces Deployment Guide + +This guide explains how to deploy Open Notebook to Hugging Face Spaces using the Docker SDK. + +## Files Created + +1. **`Dockerfile.huggingface`** - Docker configuration for single-container deployment +2. **`start.sh`** - Startup script that launches SurrealDB and FastAPI +3. **`open_notebook/database/connection.py`** - SurrealDB connection with retry logic +4. **`requirements.txt`** - Python dependencies extracted from pyproject.toml +5. **`README_HUGGINGFACE.md`** - Hugging Face Spaces README + +## Deployment Steps + +### 1. Rename Dockerfile + +```bash +# Rename the Hugging Face Dockerfile to the standard name +mv Dockerfile.huggingface Dockerfile +``` + +### 2. Create Hugging Face Space + +1. Go to [huggingface.co/new-space](https://huggingface.co/new-space) +2. Choose: + - **Space name**: `open-notebook` (or your preferred name) + - **License**: MIT + - **SDK**: Docker + - **Visibility**: Public or Private + +### 3. Push Code to Hugging Face + +```bash +# Add Hugging Face remote +git remote add hf https://huggingface.co/spaces/YOUR_USERNAME/open-notebook + +# Push to Hugging Face +git push hf main +``` + +### 4. Configure Secrets + +In your Hugging Face Space settings, add these secrets: + +**Required (add at least one)**: +- `OPENAI_API_KEY` - Your OpenAI API key +- `ANTHROPIC_API_KEY` - Your Anthropic (Claude) API key +- `GOOGLE_API_KEY` - Your Google (Gemini) API key + +**Optional**: +- `GROQ_API_KEY` - Groq API key +- `MISTRAL_API_KEY` - Mistral API key + +### 5. Wait for Build + +Hugging Face will automatically build your Docker container. This takes about 10-15 minutes. 
+ +## Important Notes + +### Port Configuration + +Hugging Face Spaces requires port **7860**. The Dockerfile is configured to use this port. + +### In-Memory Storage + +This deployment uses SurrealDB in **memory mode** (`memory`). This means: +- βœ… Fast performance +- βœ… No disk space issues +- ❌ Data is lost when container restarts +- ❌ Not suitable for production + +For persistent storage, modify `start.sh`: +```bash +# Change from: +surreal start --log debug --user root --pass root memory & + +# To: +surreal start --log debug --user root --pass root file://data/database.db & +``` + +### Retry Logic + +The connection module (`open_notebook/database/connection.py`) includes: +- **5 retry attempts** with exponential backoff +- **2-second initial delay**, increasing with each retry +- Ensures SurrealDB is ready before FastAPI starts accepting requests + +### Resource Limits + +Hugging Face Spaces free tier: +- **2 CPU cores** +- **16GB RAM** +- **50GB disk** (ephemeral) + +The sentence-transformer model (`all-MiniLM-L6-v2`) is pre-downloaded during build to avoid startup delays. + +## Testing Your Deployment + +Once deployed, test these endpoints: + +```bash +# Health check +curl https://YOUR_USERNAME-open-notebook.hf.space/health + +# API documentation +https://YOUR_USERNAME-open-notebook.hf.space/docs + +# Create a notebook +curl -X POST https://YOUR_USERNAME-open-notebook.hf.space/api/notebooks \ + -H "Content-Type: application/json" \ + -d '{"name": "Test Notebook", "description": "My first notebook"}' +``` + +## Troubleshooting + +### Build Fails + +Check the build logs in Hugging Face. 
Common issues: +- **Missing dependencies**: Verify all packages in requirements.txt +- **SurrealDB install fails**: Check internet connectivity during build +- **Out of memory**: Reduce the size of pre-downloaded models + +### Runtime Errors + +Check the runtime logs: +- **"Connection refused"**: SurrealDB didn't start - increase wait time in start.sh +- **"Database migration failed"**: Check SURREAL_* environment variables +- **"Model not found"**: Ensure sentence-transformers model downloaded during build + +### Performance Issues + +On free tier: +- Limit concurrent requests +- Use lighter LLM models (Gemini, GPT-3.5-turbo) +- Reduce chunk size for embeddings +- Enable caching for repeated queries + +## Upgrading to Persistent Storage + +To use external SurrealDB with persistent storage: + +1. Deploy SurrealDB separately (Railway, Fly.io, etc.) +2. Update environment variables in Hugging Face settings: + ``` + SURREAL_URL=wss://your-surrealdb-instance.com/rpc + SURREAL_USER=your_username + SURREAL_PASS=your_password + ``` +3. Remove SurrealDB startup from `start.sh`: + ```bash + #!/bin/bash + set -e + echo "Starting FastAPI application on port 7860..." + exec uvicorn api.main:app --host 0.0.0.0 --port 7860 + ``` + +## Alternative Deployment: Split Services + +For better performance, consider splitting frontend and backend: + +**Backend Space** (this configuration): +- Python Docker SDK +- FastAPI + SurrealDB +- Port 7860 + +**Frontend Space** (separate): +- Node.js SDK or Static +- Next.js frontend +- Points to backend API + +## Cost Optimization + +**Free Tier Recommendations**: +- Use Google Gemini (free tier: 60 requests/min) +- Pre-generate embeddings during low traffic +- Implement request queuing +- Cache LLM responses + +**Paid Tier Benefits** ($9/month): +- No cold starts +- More CPU/RAM +- Persistent storage +- Custom domains + +## Security Considerations + +1. **Never commit API keys** - Use Hugging Face Secrets +2. 
**Enable authentication** - Modify `api/auth.py` to add user login +3. **Rate limiting** - Add rate limits to prevent abuse +4. **CORS configuration** - Restrict allowed origins in production +5. **Input validation** - All file uploads should be validated + +## Support + +- **Discord**: [https://discord.gg/37XJPXfz2w](https://discord.gg/37XJPXfz2w) +- **GitHub Issues**: [https://github.com/baveshraam/software-eng-proj/issues](https://github.com/baveshraam/software-eng-proj/issues) +- **Documentation**: [https://www.open-notebook.ai](https://www.open-notebook.ai) diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..13f4e6550e06b98aa76a0af8cbb7de437de69e34 --- /dev/null +++ b/LICENSE @@ -0,0 +1,17 @@ +MIT License +Copyright (c) 2024 Luis Novo +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..69a5a96cccad6c70831a999ea4f928f8bff8863a --- /dev/null +++ b/Makefile @@ -0,0 +1,201 @@ +.PHONY: run frontend check ruff database lint api start-all stop-all status clean-cache worker worker-start worker-stop worker-restart +.PHONY: docker-buildx-prepare docker-buildx-clean docker-buildx-reset +.PHONY: docker-push docker-push-latest docker-release tag export-docs + +# Get version from pyproject.toml +VERSION := $(shell grep -m1 version pyproject.toml | cut -d'"' -f2) + +# Image names for both registries +DOCKERHUB_IMAGE := lfnovo/open_notebook +GHCR_IMAGE := ghcr.io/lfnovo/open-notebook + +# Build platforms +PLATFORMS := linux/amd64,linux/arm64 + +database: + docker compose up -d surrealdb + +run: + @echo "⚠️ Warning: Starting frontend only. For full functionality, use 'make start-all'" + cd frontend && npm run dev + +frontend: + cd frontend && npm run dev + +lint: + uv run python -m mypy . + +ruff: + ruff check . --fix + +# === Docker Build Setup === +docker-buildx-prepare: + @docker buildx inspect multi-platform-builder >/dev/null 2>&1 || \ + docker buildx create --use --name multi-platform-builder --driver docker-container + @docker buildx use multi-platform-builder + +docker-buildx-clean: + @echo "🧹 Cleaning up buildx builders..." + @docker buildx rm multi-platform-builder 2>/dev/null || true + @docker ps -a | grep buildx_buildkit | awk '{print $$1}' | xargs -r docker rm -f 2>/dev/null || true + @echo "βœ… Buildx cleanup complete!" + +docker-buildx-reset: docker-buildx-clean docker-buildx-prepare + @echo "βœ… Buildx reset complete!" + +# === Docker Build Targets === + +# Build and push version tags ONLY (no latest) for both regular and single images +docker-push: docker-buildx-prepare + @echo "πŸ“€ Building and pushing version $(VERSION) to both registries..." + @echo "πŸ”¨ Building regular image..." 
+ docker buildx build --pull \ + --platform $(PLATFORMS) \ + --progress=plain \ + -t $(DOCKERHUB_IMAGE):$(VERSION) \ + -t $(GHCR_IMAGE):$(VERSION) \ + --push \ + . + @echo "πŸ”¨ Building single-container image..." + docker buildx build --pull \ + --platform $(PLATFORMS) \ + --progress=plain \ + -f Dockerfile.single \ + -t $(DOCKERHUB_IMAGE):$(VERSION)-single \ + -t $(GHCR_IMAGE):$(VERSION)-single \ + --push \ + . + @echo "βœ… Pushed version $(VERSION) to both registries (latest NOT updated)" + @echo " πŸ“¦ Docker Hub:" + @echo " - $(DOCKERHUB_IMAGE):$(VERSION)" + @echo " - $(DOCKERHUB_IMAGE):$(VERSION)-single" + @echo " πŸ“¦ GHCR:" + @echo " - $(GHCR_IMAGE):$(VERSION)" + @echo " - $(GHCR_IMAGE):$(VERSION)-single" + +# Update v1-latest tags to current version (both regular and single images) +docker-push-latest: docker-buildx-prepare + @echo "πŸ“€ Updating v1-latest tags to version $(VERSION)..." + @echo "πŸ”¨ Building regular image with latest tag..." + docker buildx build --pull \ + --platform $(PLATFORMS) \ + --progress=plain \ + -t $(DOCKERHUB_IMAGE):$(VERSION) \ + -t $(DOCKERHUB_IMAGE):v1-latest \ + -t $(GHCR_IMAGE):$(VERSION) \ + -t $(GHCR_IMAGE):v1-latest \ + --push \ + . + @echo "πŸ”¨ Building single-container image with latest tag..." + docker buildx build --pull \ + --platform $(PLATFORMS) \ + --progress=plain \ + -f Dockerfile.single \ + -t $(DOCKERHUB_IMAGE):$(VERSION)-single \ + -t $(DOCKERHUB_IMAGE):v1-latest-single \ + -t $(GHCR_IMAGE):$(VERSION)-single \ + -t $(GHCR_IMAGE):v1-latest-single \ + --push \ + . 
+ @echo "βœ… Updated v1-latest to version $(VERSION)" + @echo " πŸ“¦ Docker Hub:" + @echo " - $(DOCKERHUB_IMAGE):$(VERSION) β†’ v1-latest" + @echo " - $(DOCKERHUB_IMAGE):$(VERSION)-single β†’ v1-latest-single" + @echo " πŸ“¦ GHCR:" + @echo " - $(GHCR_IMAGE):$(VERSION) β†’ v1-latest" + @echo " - $(GHCR_IMAGE):$(VERSION)-single β†’ v1-latest-single" + +# Full release: push version AND update latest tags +docker-release: docker-push-latest + @echo "βœ… Full release complete for version $(VERSION)" + +tag: + @version=$$(grep '^version = ' pyproject.toml | sed 's/version = "\(.*\)"/\1/'); \ + echo "Creating tag v$$version"; \ + git tag "v$$version"; \ + git push origin "v$$version" + + +dev: + docker compose -f docker-compose.dev.yml up --build + +full: + docker compose -f docker-compose.full.yml up --build + + +api: + uv run run_api.py + +# === Worker Management === +.PHONY: worker worker-start worker-stop worker-restart + +worker: worker-start + +worker-start: + @echo "Starting surreal-commands worker..." + uv run --env-file .env surreal-commands-worker --import-modules commands + +worker-stop: + @echo "Stopping surreal-commands worker..." + pkill -f "surreal-commands-worker" || true + +worker-restart: worker-stop + @sleep 2 + @$(MAKE) worker-start + +# === Service Management === +start-all: + @echo "πŸš€ Starting Open Notebook (Database + API + Worker + Frontend)..." + @echo "πŸ“Š Starting SurrealDB..." + @docker compose -f docker-compose.dev.yml up -d surrealdb + @sleep 3 + @echo "πŸ”§ Starting API backend..." + @uv run run_api.py & + @sleep 3 + @echo "βš™οΈ Starting background worker..." + @uv run --env-file .env surreal-commands-worker --import-modules commands & + @sleep 2 + @echo "🌐 Starting Next.js frontend..." + @echo "βœ… All services started!" 
+ @echo "πŸ“± Frontend: http://localhost:3000" + @echo "πŸ”— API: http://localhost:5055" + @echo "πŸ“š API Docs: http://localhost:5055/docs" + cd frontend && npm run dev + +stop-all: + @echo "πŸ›‘ Stopping all Open Notebook services..." + @pkill -f "next dev" || true + @pkill -f "surreal-commands-worker" || true + @pkill -f "run_api.py" || true + @pkill -f "uvicorn api.main:app" || true + @docker compose down + @echo "βœ… All services stopped!" + +status: + @echo "πŸ“Š Open Notebook Service Status:" + @echo "Database (SurrealDB):" + @docker compose ps surrealdb 2>/dev/null || echo " ❌ Not running" + @echo "API Backend:" + @pgrep -f "run_api.py\|uvicorn api.main:app" >/dev/null && echo " βœ… Running" || echo " ❌ Not running" + @echo "Background Worker:" + @pgrep -f "surreal-commands-worker" >/dev/null && echo " βœ… Running" || echo " ❌ Not running" + @echo "Next.js Frontend:" + @pgrep -f "next dev" >/dev/null && echo " βœ… Running" || echo " ❌ Not running" + +# === Documentation Export === +export-docs: + @echo "πŸ“š Exporting documentation..." + @uv run python scripts/export_docs.py + @echo "βœ… Documentation export complete!" + +# === Cleanup === +clean-cache: + @echo "🧹 Cleaning cache directories..." + @find . -name "__pycache__" -type d -exec rm -rf {} + 2>/dev/null || true + @find . -name ".mypy_cache" -type d -exec rm -rf {} + 2>/dev/null || true + @find . -name ".ruff_cache" -type d -exec rm -rf {} + 2>/dev/null || true + @find . -name ".pytest_cache" -type d -exec rm -rf {} + 2>/dev/null || true + @find . -name "*.pyc" -type f -delete 2>/dev/null || true + @find . -name "*.pyo" -type f -delete 2>/dev/null || true + @find . -name "*.pyd" -type f -delete 2>/dev/null || true + @echo "βœ… Cache directories cleaned!" 
\ No newline at end of file diff --git a/QUICK_FIX.md b/QUICK_FIX.md new file mode 100644 index 0000000000000000000000000000000000000000..cf1ef4fa5412c3726de1420b40001e3c91f000bc --- /dev/null +++ b/QUICK_FIX.md @@ -0,0 +1,99 @@ +# 🎯 Railway Quick Fix - Copy & Paste Ready + +## Variables to CHANGE in Railway Dashboard + +### 1. Fix SURREAL_URL (CRITICAL) +```bash +# ❌ Remove or change this: +SURREAL_URL=ws://localhost:8000/rpc + +# βœ… Use this instead: +SURREAL_URL=ws://127.0.0.1:8000/rpc +``` + +### 2. Add INTERNAL_API_URL (CRITICAL) +```bash +# βœ… Add this new variable: +INTERNAL_API_URL=http://127.0.0.1:5055 +``` + +### 3. Update API_URL After First Deploy +```bash +# ❌ Current: +API_URL=http://localhost:5055 + +# βœ… Change to YOUR Railway domain (after you get it): +API_URL=https://your-app-production-xxxx.up.railway.app +``` + +## Variables to ADD in Railway Dashboard + +### 4. Add Your AI API Keys +```bash +# Google Gemini (Required for your setup) +GOOGLE_API_KEY=paste_your_gemini_key_here + +# Groq (Required for your setup) +GROQ_API_KEY=paste_your_groq_key_here + +# If using Ollama for Llama models (Optional) +OLLAMA_API_BASE=http://your-ollama-host:11434 +``` + +## Variables to KEEP (Don't Change) + +These are already correct in your Railway: +```bash +βœ… SURREAL_USER=root +βœ… SURREAL_PASSWORD=root +βœ… SURREAL_NAMESPACE=test +βœ… SURREAL_DATABASE=test +βœ… SURREAL_COMMANDS_MAX_TASKS=5 +βœ… SURREAL_COMMANDS_RETRY_ENABLED=true +βœ… SURREAL_COMMANDS_RETRY_MAX_ATTEMPTS=3 +βœ… SURREAL_COMMANDS_RETRY_WAIT_STRATEGY=exponential_jitter +βœ… SURREAL_COMMANDS_RETRY_WAIT_MIN=1 +βœ… SURREAL_COMMANDS_RETRY_WAIT_MAX=30 +``` + +## Complete Variable List for Railway + +Copy this entire block and set in Railway Variables: + +```plaintext +SURREAL_URL=ws://127.0.0.1:8000/rpc +SURREAL_USER=root +SURREAL_PASSWORD=root +SURREAL_NAMESPACE=test +SURREAL_DATABASE=test +INTERNAL_API_URL=http://127.0.0.1:5055 +API_URL=https://YOUR_RAILWAY_DOMAIN_HERE 
+SURREAL_COMMANDS_MAX_TASKS=5 +SURREAL_COMMANDS_RETRY_ENABLED=true +SURREAL_COMMANDS_RETRY_MAX_ATTEMPTS=3 +SURREAL_COMMANDS_RETRY_WAIT_STRATEGY=exponential_jitter +SURREAL_COMMANDS_RETRY_WAIT_MIN=1 +SURREAL_COMMANDS_RETRY_WAIT_MAX=30 +GOOGLE_API_KEY=your_gemini_api_key +GROQ_API_KEY=your_groq_api_key +``` + +## Deployment Steps + +1. **Update Railway Variables** (see above) +2. **Push Code**: + ```powershell + git add . + git commit -m "Fix Railway config and add migrations 15-17" + git push origin main + ``` +3. **Wait for Deploy** (Railway auto-deploys from GitHub) +4. **Get Railway Domain** from Railway Dashboard +5. **Update API_URL** with your actual domain +6. **Verify** at `https://your-domain/api/health` + +## That's It! + +Your deployment should work after these changes. + +Questions? Read `YOUR_RAILWAY_CONFIG.md` for detailed explanations. diff --git a/RAILWAY.md b/RAILWAY.md new file mode 100644 index 0000000000000000000000000000000000000000..dfe17e34fefa93c9468a894575f8b59efde9eacc --- /dev/null +++ b/RAILWAY.md @@ -0,0 +1,197 @@ +# πŸš‚ Railway Deployment Guide for Open Notebook + +## Prerequisites + +- A [Railway](https://railway.app/) account +- At least one AI API key (OpenAI, Anthropic, etc.) + +## Quick Deploy + +### Option 1: Deploy from GitHub (Recommended) + +1. **Fork this repository** to your GitHub account + +2. **Create a new Railway project:** + - Go to [Railway](https://railway.app/) + - Click "New Project" + - Select "Deploy from GitHub repo" + - Choose your forked repository + +3. **Configure Environment Variables:** + + Go to your Railway service β†’ Variables tab and add: + + ```bash + # Required Variables + SURREAL_URL=ws://127.0.0.1:8000/rpc + SURREAL_USER=root + SURREAL_PASSWORD=root + SURREAL_NAMESPACE=open_notebook + SURREAL_DATABASE=production + INTERNAL_API_URL=http://127.0.0.1:5055 + + # Add your AI API key (at least one required) + OPENAI_API_KEY=sk-your-key-here + ``` + +4. 
**After first deployment, set the API_URL:** + + Once Railway generates your public URL (e.g., `https://your-app.up.railway.app`): + + ```bash + API_URL=https://your-app.up.railway.app + ``` + +5. **Configure Railway Settings:** + - Go to Settings β†’ Networking + - Make sure port 8080 is exposed (Railway should auto-detect this) + - Health check path: `/api/health` + +6. **Redeploy** after adding the API_URL + +### Option 2: Deploy with Railway CLI + +```bash +# Install Railway CLI +npm i -g @railway/cli + +# Login to Railway +railway login + +# Link to project (or create new) +railway link + +# Set environment variables +railway variables set SURREAL_URL=ws://127.0.0.1:8000/rpc +railway variables set SURREAL_USER=root +railway variables set SURREAL_PASSWORD=root +railway variables set SURREAL_NAMESPACE=open_notebook +railway variables set SURREAL_DATABASE=production +railway variables set INTERNAL_API_URL=http://127.0.0.1:5055 +railway variables set OPENAI_API_KEY=sk-your-key-here + +# Deploy +railway up + +# Get your app URL +railway domain + +# Set API_URL with your domain +railway variables set API_URL=https://your-app.up.railway.app +``` + +## Architecture + +This deployment uses the **single-container** architecture: +- βœ… SurrealDB (embedded database) +- βœ… FastAPI backend (port 5055) +- βœ… Background worker +- βœ… Next.js frontend (port 8080) + +All services run in one container managed by Supervisord. + +## Troubleshooting + +### Build Fails + +**Issue:** Build timeout or memory issues + +**Solution:** +- Increase Railway instance size temporarily during build +- Or use pre-built Docker image: + ```dockerfile + FROM lfnovo/open_notebook:v1-latest-single + ``` + +### Service Won't Start + +**Issue:** Container restarts continuously + +**Solution:** Check logs for: +1. Missing environment variables (especially `SURREAL_URL`) +2. Database connection errors +3. 
Frontend build issues + +### Can't Access the App + +**Issue:** Railway shows running but can't access + +**Solution:** +1. Verify `API_URL` is set to your Railway domain +2. Check that port 8080 is exposed in Railway settings +3. Wait 2-3 minutes after deployment for all services to start + +### Database Migration Errors + +**Issue:** Migration fails on startup + +**Solution:** +- Ensure all required migration files exist (1-17) +- Check database connection settings +- View logs: `railway logs` or in Railway dashboard + +## Environment Variables Reference + +| Variable | Required | Default | Description | +|----------|----------|---------|-------------| +| `SURREAL_URL` | Yes | - | Database WebSocket URL | +| `SURREAL_USER` | Yes | - | Database username | +| `SURREAL_PASSWORD` | Yes | - | Database password | +| `SURREAL_NAMESPACE` | Yes | - | Database namespace | +| `SURREAL_DATABASE` | Yes | - | Database name | +| `INTERNAL_API_URL` | Yes | - | Internal API endpoint | +| `API_URL` | Yes | - | Public API URL (your Railway domain) | +| `OPENAI_API_KEY` | Yes* | - | OpenAI API key (*or another AI provider) | +| `ANTHROPIC_API_KEY` | No | - | Anthropic API key | +| `GOOGLE_API_KEY` | No | - | Google AI API key | +| `OPEN_NOTEBOOK_PASSWORD` | No | - | Optional app password protection | + +## Persistent Data + +Railway provides **ephemeral storage**, meaning: +- ⚠️ Database data will be lost on redeploys +- ⚠️ Uploaded files will be lost on redeploys + +For production use, consider: +1. Using Railway's **Volume** feature for `/mydata` (database) +2. Using external storage (S3, Cloudinary) for uploads +3. Or deploying to a platform with persistent storage + +## Performance Tips + +1. **Start with Hobby plan** ($5/month) for testing +2. **Upgrade if needed** based on usage +3. **Monitor memory usage** - increase if services crash +4. 
**Use CDN** for faster frontend loading (Railway Pro) + +## Cost Estimation + +- **Hobby Plan**: ~$5-10/month (includes some usage) +- **Pro Plan**: ~$20-30/month (higher limits) +- Plus: AI API costs (pay per use) + +Railway charges for: +- CPU time +- Memory usage +- Bandwidth + +The single-container deployment is optimized to minimize costs. + +## Support + +- πŸ“– [Full Documentation](README.md) +- πŸ’¬ [Discord Community](https://discord.gg/37XJPXfz2w) +- πŸ› [GitHub Issues](https://github.com/lfnovo/open-notebook/issues) +- πŸš‚ [Railway Docs](https://docs.railway.app/) + +## Alternative Deployment Options + +If Railway doesn't work for you, consider: + +- **Docker** - Self-hosted on any VPS (DigitalOcean, Linode, etc.) +- **AWS ECS/Fargate** - Managed containers +- **Google Cloud Run** - Serverless containers +- **Azure Container Instances** - Pay-per-use containers +- **Fly.io** - Similar to Railway + +See the main [Deployment Guide](docs/deployment/index.md) for more options. diff --git a/RAILWAY_CHECKLIST.md b/RAILWAY_CHECKLIST.md new file mode 100644 index 0000000000000000000000000000000000000000..cba83a42339610c4da906c824536cc9af497c2b4 --- /dev/null +++ b/RAILWAY_CHECKLIST.md @@ -0,0 +1,164 @@ +# πŸš€ Railway Deployment Quick Checklist + +## Pre-Deployment +- [ ] Fork repository to your GitHub account +- [ ] Have at least one AI API key ready (OpenAI, Anthropic, etc.) +- [ ] Have Railway account created + +## Step 1: Push Code +```bash +git add . +git commit -m "Add Railway deployment fixes" +git push origin main +``` + +## Step 2: Create Railway Project +- [ ] Go to https://railway.app/ +- [ ] Click "New Project" +- [ ] Select "Deploy from GitHub repo" +- [ ] Choose your forked repository +- [ ] Railway will start building automatically + +## Step 3: Set Environment Variables (CRITICAL!) 
+ +Go to Railway β†’ Your Service β†’ Variables tab + +### Required Variables (Set BEFORE first successful deploy): +```bash +SURREAL_URL=ws://127.0.0.1:8000/rpc +SURREAL_USER=root +SURREAL_PASSWORD=root +SURREAL_NAMESPACE=open_notebook +SURREAL_DATABASE=production +INTERNAL_API_URL=http://127.0.0.1:5055 +OPENAI_API_KEY=sk-your-actual-openai-key +``` + +- [ ] All required variables set +- [ ] Wait for build to complete +- [ ] Note your Railway domain (e.g., `https://yourapp-production.up.railway.app`) + +## Step 4: Set API_URL (AFTER getting domain) +```bash +API_URL=https://yourapp-production.up.railway.app +``` + +- [ ] API_URL variable added with YOUR actual Railway domain +- [ ] Redeploy triggered (automatic after adding variable) + +## Step 5: Configure Railway Settings +- [ ] Go to Settings β†’ Networking +- [ ] Verify port 8080 is exposed (should auto-detect) +- [ ] Health check path: `/api/health` + +## Step 6: Verify Deployment + +### Check These URLs: +- [ ] `https://yourapp.up.railway.app/` β†’ Should show Open Notebook UI +- [ ] `https://yourapp.up.railway.app/api/health` β†’ Should return `{"status":"ok"}` +- [ ] `https://yourapp.up.railway.app/api/docs` β†’ Should show API documentation + +### Check Railway Logs: +- [ ] All 4 services started: surrealdb, api, worker, frontend +- [ ] No error messages (warnings are OK) +- [ ] Migrations completed successfully +- [ ] Frontend shows "Ready in XXms" + +## Step 7: Test Functionality +- [ ] Create a new notebook +- [ ] Upload a test document +- [ ] Try chat functionality +- [ ] Generate a podcast (optional) + +## Common Issues & Quick Fixes + +### ❌ Build Timeout +**Solution:** Upgrade to Railway Hobby plan ($5/month) for longer build times + +### ❌ Services Keep Restarting +**Solution:** Check environment variables are set correctly, especially `SURREAL_URL` + +### ❌ Frontend Can't Connect to API +**Solution:** Ensure `API_URL` is set to your actual Railway domain (with https://) + +### ❌ Out of Memory 
+**Solution:** Upgrade Railway plan (single container needs ~2GB RAM) + +### ❌ "Database Connection Failed" +**Solution:** +1. Check `SURREAL_URL=ws://127.0.0.1:8000/rpc` (note: 127.0.0.1, not localhost) +2. Verify SurrealDB service is running in logs + +## Environment Variables Checklist + +### Required (App Won't Work Without These): +- [ ] `SURREAL_URL` +- [ ] `SURREAL_USER` +- [ ] `SURREAL_PASSWORD` +- [ ] `SURREAL_NAMESPACE` +- [ ] `SURREAL_DATABASE` +- [ ] `INTERNAL_API_URL` +- [ ] `API_URL` (add after first deploy) +- [ ] At least one AI API key (OPENAI_API_KEY, ANTHROPIC_API_KEY, etc.) + +### Optional (Add As Needed): +- [ ] `ANTHROPIC_API_KEY` - For Claude models +- [ ] `GOOGLE_API_KEY` - For Gemini models +- [ ] `GROQ_API_KEY` - For Groq models +- [ ] `MISTRAL_API_KEY` - For Mistral models +- [ ] `OPEN_NOTEBOOK_PASSWORD` - For password protection +- [ ] `FIRECRAWL_API_KEY` - For enhanced web scraping +- [ ] `JINA_API_KEY` - For advanced embeddings + +## Success Indicators + +Your deployment is successful when you see in Railway logs: +``` +βœ“ Ready in XXXms +INFO: Application startup complete. +INFO: Uvicorn running on http://0.0.0.0:5055 +Migrations completed successfully. Database is now at version 17 +All services entered RUNNING state +``` + +## Cost Estimation + +**Railway Hobby Plan**: ~$5-10/month +- Includes $5 usage credit +- Covers single container deployment +- Sufficient for testing and small-scale use + +**Plus AI API Costs**: Pay-per-use +- OpenAI: ~$0.002-0.06 per 1K tokens +- Anthropic: Similar pricing +- Varies by model and usage + +## Support + +Need help? +- πŸ“– Read [RAILWAY.md](./RAILWAY.md) for detailed guide +- πŸ’¬ Join [Discord](https://discord.gg/37XJPXfz2w) +- πŸ› Report [GitHub Issues](https://github.com/PremKxmar/se/issues) + +--- + +## After Successful Deployment + +1. **Bookmark your Railway app URL** +2. **Set up volume** (in Railway) for `/mydata` to persist database +3. **Monitor usage** in Railway dashboard +4. 
**Configure more AI providers** as needed +5. **Secure with password** by setting `OPEN_NOTEBOOK_PASSWORD` + +## Development Workflow + +To update your deployed app: +1. Make changes locally +2. Test with `docker compose up` or `npm run dev` +3. Commit and push to GitHub +4. Railway auto-deploys (if enabled) +5. Verify in Railway logs + +--- + +**Pro Tip:** Copy this checklist and check off items as you complete them! diff --git a/RAILWAY_FIXES.md b/RAILWAY_FIXES.md new file mode 100644 index 0000000000000000000000000000000000000000..da65b26be10f767569d608b7a7b68b56d80fe029 --- /dev/null +++ b/RAILWAY_FIXES.md @@ -0,0 +1,196 @@ +# Railway Deployment Fixes - Summary + +## Issues Identified + +### 1. ⚠️ Next.js Standalone Configuration Conflict +**Problem:** The logs showed: +``` +⚠ "next start" does not work with "output: standalone" configuration. +Use "node .next/standalone/server.js" instead. +``` + +**Root Cause:** `frontend/next.config.ts` had `output: "standalone"` enabled, but the startup command used `npm run start` which calls `next start`. + +**Fix Applied:** +- Disabled standalone mode in `next.config.ts` for Railway deployment +- This allows standard `next start` command to work properly + +### 2. πŸ“¦ Missing Database Migrations (15, 16, 17) +**Problem:** Migration files 15-17 existed but weren't registered in the migration manager, causing potential schema inconsistencies. + +**Fix Applied:** +- Updated `open_notebook/database/async_migrate.py` to include migrations 15, 16, and 17 +- Added both up and down migration files + +### 3. πŸ”§ Railway-Specific Configuration Missing +**Problem:** No Railway-specific configuration files, making deployment harder and less optimized. 
+ +**Fix Applied:** +- Created `railway.json` for Railway build configuration +- Created `Dockerfile.railway` optimized for Railway +- Created `supervisord.railway.conf` with proper PORT env variable handling +- Created `.env.railway` template with all required variables +- Created `RAILWAY.md` comprehensive deployment guide + +### 4. 🌐 Port Configuration for Railway +**Problem:** Railway assigns dynamic PORT, but the config wasn't flexible enough. + +**Fix Applied:** +- Updated supervisord to use `%(ENV_PORT)s` to read Railway's PORT variable +- Ensured frontend binds to the correct port (8080 by default, or Railway's PORT) + +## Files Created + +1. **railway.json** - Railway deployment configuration +2. **Dockerfile.railway** - Railway-optimized Docker build +3. **supervisord.railway.conf** - Railway-specific supervisor config +4. **.env.railway** - Environment variable template for Railway +5. **RAILWAY.md** - Complete deployment guide for Railway users + +## Files Modified + +1. **frontend/next.config.ts** - Disabled standalone output for Railway +2. **open_notebook/database/async_migrate.py** - Added migrations 15, 16, 17 +3. **supervisord.single.conf** - Fixed frontend startup command + +## Deployment Success Indicators + +From your logs, the deployment was actually **mostly successful**: +- βœ… SurrealDB started correctly +- βœ… API server started on port 5055 +- βœ… Worker started successfully +- βœ… Frontend built and started on port 8080 +- βœ… All migrations (1-14) ran successfully +- βœ… All services entered RUNNING state + +The warning about standalone mode was **not blocking deployment**, but could cause issues in production. + +## What Was Actually Wrong? + +Looking at your logs more carefully, there's **NO ERROR** - the deployment was successful! + +The confusion might be: +1. The supervisor warning about running as root (not critical) +2. The Next.js standalone warning (now fixed) +3. 
Missing pytesseract module (optional OCR feature) + +These are **warnings**, not errors. The app should be working. + +## How to Deploy to Railway Now + +### Step 1: Push Changes to GitHub +```bash +cd c:\sem6-real\studyrocket\notebookllm\open-notebook +git add . +git commit -m "Add Railway deployment configuration and fixes" +git push origin main +``` + +### Step 2: Configure Railway Environment Variables + +In Railway dashboard, add these variables: + +**Required:** +```env +SURREAL_URL=ws://127.0.0.1:8000/rpc +SURREAL_USER=root +SURREAL_PASSWORD=root +SURREAL_NAMESPACE=open_notebook +SURREAL_DATABASE=production +INTERNAL_API_URL=http://127.0.0.1:5055 +OPENAI_API_KEY=your_actual_key_here +``` + +### Step 3: Set API_URL After First Deploy + +After Railway generates your domain (e.g., `https://your-app-production-xxxx.up.railway.app`): + +```env +API_URL=https://your-app-production-xxxx.up.railway.app +``` + +Then redeploy. + +### Step 4: Verify Deployment + +Check these endpoints: +- `https://your-app.up.railway.app/` - Frontend UI +- `https://your-app.up.railway.app/api/health` - API health check +- `https://your-app.up.railway.app/api/docs` - API documentation + +## Troubleshooting + +### If Build Times Out +Railway free tier has build time limits. Solutions: +1. Upgrade to Hobby plan ($5/month) +2. Use pre-built image: `FROM lfnovo/open_notebook:v1-latest-single` + +### If App Crashes After Deploy +1. Check Railway logs for actual errors +2. Verify all environment variables are set +3. Wait 2-3 minutes - services need time to start + +### If Frontend Can't Connect to API +1. Ensure `API_URL` is set to your Railway domain +2. Check that port 8080 is exposed (Railway auto-detects) +3. Verify `INTERNAL_API_URL=http://127.0.0.1:5055` + +## Testing Locally + +Before pushing to Railway, test with Docker: + +```powershell +# Build Railway Dockerfile +docker build -f Dockerfile.railway -t open-notebook-railway . 
+ +# Run with Railway-like environment +docker run -p 8080:8080 -p 5055:5055 ` + -e PORT=8080 ` + -e SURREAL_URL=ws://127.0.0.1:8000/rpc ` + -e SURREAL_USER=root ` + -e SURREAL_PASSWORD=root ` + -e SURREAL_NAMESPACE=open_notebook ` + -e SURREAL_DATABASE=production ` + -e INTERNAL_API_URL=http://127.0.0.1:5055 ` + -e API_URL=http://localhost:8080 ` + -e OPENAI_API_KEY=your_key ` + open-notebook-railway +``` + +Access at: http://localhost:8080 + +## Next Steps + +1. βœ… **Commit and push** all changes to GitHub +2. βœ… **Configure environment variables** in Railway +3. βœ… **Deploy** from GitHub in Railway +4. βœ… **Set API_URL** after getting your domain +5. βœ… **Redeploy** to apply API_URL +6. βœ… **Test** all functionality + +## Additional Notes + +- **Database persistence**: Railway containers are ephemeral. For production, consider: + - Using Railway Volumes for `/mydata` (database storage) + - Exporting/importing data periodically + - Using external database (more expensive) + +- **Costs**: Railway charges for: + - CPU usage + - Memory usage + - Bandwidth + - Start with Hobby plan ($5/mo) for testing + +- **Performance**: Single container runs 4 services, so: + - May need 2GB+ RAM for smooth operation + - Consider upgrading Railway plan if services crash + +## Support Resources + +- πŸ“– [RAILWAY.md](./RAILWAY.md) - Full Railway deployment guide +- πŸ’¬ [Discord Community](https://discord.gg/37XJPXfz2w) +- πŸ› [GitHub Issues](https://github.com/lfnovo/open-notebook/issues) + +--- + +**Important:** Your deployment logs show the app **IS WORKING**. The issues were warnings, not blocking errors. These fixes will make the deployment more robust and eliminate the warnings. 
diff --git a/README.md b/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c120135fdc1eaccdf3960dbaaf2634f5e15e44b6 --- /dev/null +++ b/README.md @@ -0,0 +1,9 @@ +--- +title: Open Notebook +emoji: πŸ““ +colorFrom: blue +colorTo: green +sdk: docker +pinned: false +app_port: 7860 +--- \ No newline at end of file diff --git a/README_HUGGINGFACE.md b/README_HUGGINGFACE.md new file mode 100644 index 0000000000000000000000000000000000000000..e933b51b1f0514a993c84377dc57092b650028a9 --- /dev/null +++ b/README_HUGGINGFACE.md @@ -0,0 +1,53 @@ +--- +title: Open Notebook +emoji: πŸ““ +colorFrom: blue +colorTo: green +sdk: docker +pinned: false +license: mit +--- + +# Open Notebook - Hugging Face Spaces Deployment + +An open source, privacy-focused alternative to Google's Notebook LM! + +This Space runs Open Notebook with a FastAPI backend and embedded SurrealDB database. + +## Features + +- πŸ”’ **Control your data** - Keep your research private and secure +- πŸ€– **Choose your AI models** - Support for 16+ providers including OpenAI, Anthropic, Ollama, and more +- πŸ“š **Organize multi-modal content** - PDFs, videos, audio, web pages +- πŸŽ™οΈ **Generate professional podcasts** - Advanced multi-speaker podcast generation +- πŸ” **Search intelligently** - Full-text and vector search across all your content +- πŸ’¬ **Chat with context** - AI conversations powered by your research + +## Configuration + +To use this Space, you need to set the following secrets in the Space settings: + +### Required API Keys (set at least one): +- `OPENAI_API_KEY` - For OpenAI models (GPT-4, GPT-3.5) +- `ANTHROPIC_API_KEY` - For Claude models +- `GOOGLE_API_KEY` - For Gemini models + +### Optional Configuration: +- `GROQ_API_KEY` - For Groq models +- `MISTRAL_API_KEY` - For Mistral models + +## Usage + +Once deployed, access the API at: +- API Documentation: `https://your-space-name.hf.space/docs` +- Health Check: `https://your-space-name.hf.space/health` + +## Note 
+ +This deployment uses in-memory SurrealDB storage. Data will be lost when the Space restarts. +For persistent storage, consider using an external SurrealDB instance. + +## Learn More + +- [GitHub Repository](https://github.com/baveshraam/software-eng-proj) +- [Original Project](https://github.com/lfnovo/open-notebook) diff --git a/YOUR_RAILWAY_CONFIG.md b/YOUR_RAILWAY_CONFIG.md new file mode 100644 index 0000000000000000000000000000000000000000..067323e10921c90167ac75d3de54c707dac4cdb7 --- /dev/null +++ b/YOUR_RAILWAY_CONFIG.md @@ -0,0 +1,286 @@ +# πŸš‚ Your Railway Configuration - Ready to Deploy + +## Your Current Setup Analysis + +Based on your existing Railway variables, your configuration uses: +- βœ… **Database**: `test` namespace and database (not production) +- βœ… **Multiple AI Providers**: Gemini (Google), Groq, Llama +- βœ… **Worker Configuration**: 5 concurrent tasks with retry logic +- βœ… **Proper retry settings**: Exponential jitter for reliability + +## STEP 1: Current Railway Variables (What You Already Have) + +These are already set in your Railway deployment: + +```bash +# Database - Already Configured βœ… +SURREAL_URL=ws://localhost:8000/rpc # ⚠️ NEEDS FIX (see below) +SURREAL_USER=root +SURREAL_PASSWORD=root +SURREAL_NAMESPACE=test +SURREAL_DATABASE=test + +# API URL - Already Configured βœ… +API_URL=http://localhost:5055 # ⚠️ NEEDS UPDATE (see below) + +# Worker & Retry - Already Configured βœ… +SURREAL_COMMANDS_MAX_TASKS=5 +SURREAL_COMMANDS_RETRY_ENABLED=true +SURREAL_COMMANDS_RETRY_MAX_ATTEMPTS=3 +SURREAL_COMMANDS_RETRY_WAIT_STRATEGY=exponential_jitter +SURREAL_COMMANDS_RETRY_WAIT_MIN=1 +SURREAL_COMMANDS_RETRY_WAIT_MAX=30 +``` + +## STEP 2: Critical Fixes Needed + +### Fix 1: Change `localhost` to `127.0.0.1` for SURREAL_URL + +**In Railway Dashboard β†’ Variables:** + +❌ **Current (WRONG):** +```bash +SURREAL_URL=ws://localhost:8000/rpc +``` + +βœ… **Change to (CORRECT):** +```bash +SURREAL_URL=ws://127.0.0.1:8000/rpc +``` + +**Why?** 
Railway's container networking requires `127.0.0.1` instead of `localhost`. + +### Fix 2: Add INTERNAL_API_URL + +**Add this new variable in Railway:** +```bash +INTERNAL_API_URL=http://127.0.0.1:5055 +``` + +**Why?** Next.js needs this for server-side API proxying. + +### Fix 3: Update API_URL to Your Railway Domain + +**After your first successful deploy:** + +❌ **Current:** +```bash +API_URL=http://localhost:5055 +``` + +βœ… **Change to YOUR Railway domain:** +```bash +API_URL=https://your-app-production-xxxx.up.railway.app +``` + +**How to find your Railway domain:** +1. Go to Railway Dashboard β†’ Your Service +2. Look at the "Deployments" tab +3. Copy the domain (e.g., `https://se-production-1234.up.railway.app`) +4. Paste it as the API_URL value (without `/api` at the end) + +## STEP 3: Add Your AI API Keys + +You mentioned using **Gemini, Groq, and Llama**. Add these variables: + +### For Google Gemini (Required) +```bash +GOOGLE_API_KEY=your_actual_gemini_api_key +``` +Get it at: https://makersuite.google.com/app/apikey + +### For Groq (Required) +```bash +GROQ_API_KEY=your_actual_groq_api_key +``` +Get it at: https://console.groq.com/keys + +### For Llama via Ollama (If applicable) +If you're running Ollama somewhere accessible: +```bash +OLLAMA_API_BASE=http://your-ollama-host:11434 +``` + +**OR** if using Llama via Groq (most common): +- No extra configuration needed - Groq provides Llama models + +### Optional: Other Providers +If you want to add more providers later: +```bash +# OpenAI (optional) +OPENAI_API_KEY=sk-your_key + +# Anthropic Claude (optional) +ANTHROPIC_API_KEY=sk-ant-your_key + +# Mistral (optional) +MISTRAL_API_KEY=your_key +``` + +## STEP 4: Complete Railway Variables List + +Copy this EXACT configuration to Railway: + +```bash +# ============================================ +# DATABASE (Keep as-is) +# ============================================ +SURREAL_URL=ws://127.0.0.1:8000/rpc +SURREAL_USER=root +SURREAL_PASSWORD=root 
+SURREAL_NAMESPACE=test +SURREAL_DATABASE=test + +# ============================================ +# API CONFIGURATION +# ============================================ +INTERNAL_API_URL=http://127.0.0.1:5055 +API_URL=https://YOUR-RAILWAY-DOMAIN.up.railway.app + +# ============================================ +# WORKER & RETRY (Keep as-is) +# ============================================ +SURREAL_COMMANDS_MAX_TASKS=5 +SURREAL_COMMANDS_RETRY_ENABLED=true +SURREAL_COMMANDS_RETRY_MAX_ATTEMPTS=3 +SURREAL_COMMANDS_RETRY_WAIT_STRATEGY=exponential_jitter +SURREAL_COMMANDS_RETRY_WAIT_MIN=1 +SURREAL_COMMANDS_RETRY_WAIT_MAX=30 + +# ============================================ +# AI PROVIDERS (ADD YOUR KEYS) +# ============================================ +GOOGLE_API_KEY=your_actual_gemini_key +GROQ_API_KEY=your_actual_groq_key + +# Optional: If using Ollama for Llama +# OLLAMA_API_BASE=http://your-ollama-host:11434 + +# Optional: Other providers +# OPENAI_API_KEY=sk-your_key +# ANTHROPIC_API_KEY=sk-ant-your_key +``` + +## STEP 5: Deploy Order + +### A. Before Redeploying - Set These First: +1. βœ… Change `SURREAL_URL` to use `127.0.0.1` +2. βœ… Add `INTERNAL_API_URL=http://127.0.0.1:5055` +3. βœ… Add `GOOGLE_API_KEY` (your Gemini key) +4. βœ… Add `GROQ_API_KEY` (your Groq key) +5. ⏸️ Keep `API_URL` as is for now (update after deploy) + +### B. Push Code Changes: +```powershell +cd c:\sem6-real\studyrocket\notebookllm\open-notebook +git add . +git commit -m "Fix Railway deployment configuration" +git push origin main +``` + +### C. After Successful Deploy: +1. βœ… Copy your Railway domain +2. βœ… Update `API_URL` to your Railway domain +3. βœ… Railway will auto-redeploy + +## STEP 6: Verification Checklist + +After deployment completes, verify: + +- [ ] Service shows "RUNNING" in Railway +- [ ] Check logs: "Application startup complete" +- [ ] Check logs: "Migrations completed successfully. 
Database is now at version 17" +- [ ] Visit `https://your-domain.up.railway.app/` β†’ Should load UI +- [ ] Visit `https://your-domain.up.railway.app/api/health` β†’ Should return `{"status":"ok"}` +- [ ] Try creating a notebook in the UI +- [ ] Test AI features (chat, generation) + +## Common Issues Specific to Your Setup + +### Issue: "Database Connection Failed" +**Cause:** Using `localhost` instead of `127.0.0.1` +**Solution:** Change `SURREAL_URL=ws://127.0.0.1:8000/rpc` + +### Issue: "Unable to Connect to API Server" +**Cause:** `INTERNAL_API_URL` not set or `API_URL` pointing to localhost +**Solution:** +- Set `INTERNAL_API_URL=http://127.0.0.1:5055` +- Set `API_URL=https://your-railway-domain.up.railway.app` + +### Issue: "AI Model Not Available" +**Cause:** API keys not set or incorrect +**Solution:** +- Verify `GOOGLE_API_KEY` is set correctly +- Verify `GROQ_API_KEY` is set correctly +- Check API key validity at provider dashboards + +### Issue: "Migrations Stuck at Version 14" +**Cause:** Code changes not deployed +**Solution:** +- Ensure you pushed the latest code with migrations 15-17 +- Check Railway logs for migration errors +- Verify all migration files exist in the repo + +## Model Configuration by Provider + +Based on your setup, here's which models you can use: + +### Via Gemini (GOOGLE_API_KEY) +- βœ… `gemini-pro` - General purpose +- βœ… `gemini-pro-vision` - Image understanding +- βœ… `gemini-1.5-pro` - Long context (1M tokens) +- βœ… `gemini-1.5-flash` - Fast & efficient +- βœ… Text embeddings via Gemini + +### Via Groq (GROQ_API_KEY) +- βœ… `llama-3.1-70b-versatile` - Best Llama model +- βœ… `llama-3.1-8b-instant` - Fast Llama +- βœ… `llama3-70b-8192` - Older Llama version +- βœ… `mixtral-8x7b-32768` - Mixtral model +- βœ… `gemma2-9b-it` - Google's Gemma + +### Via Ollama (if configured) +- βœ… Any locally installed model +- βœ… `llama3:latest`, `llama3.1:latest` +- βœ… `mistral:latest`, `mixtral:latest` +- βœ… Custom models + +## Cost 
import hmac
import os
from typing import Optional

from fastapi import HTTPException, Request
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.responses import JSONResponse


class PasswordAuthMiddleware(BaseHTTPMiddleware):
    """
    Middleware to check password authentication for all API requests.

    Only active when the OPEN_NOTEBOOK_PASSWORD environment variable is set;
    when unset, every request passes through unauthenticated.
    """

    def __init__(self, app, excluded_paths: Optional[list] = None):
        super().__init__(app)
        # Password is read once at startup; changing it requires a restart.
        self.password = os.environ.get("OPEN_NOTEBOOK_PASSWORD")
        # Health-check and documentation endpoints stay reachable without credentials.
        self.excluded_paths = excluded_paths or ["/", "/health", "/docs", "/openapi.json", "/redoc"]

    async def dispatch(self, request: Request, call_next):
        # Skip authentication if no password is set
        if not self.password:
            return await call_next(request)

        # Skip authentication for excluded paths (exact path match)
        if request.url.path in self.excluded_paths:
            return await call_next(request)

        # Skip authentication for CORS preflight requests (OPTIONS)
        if request.method == "OPTIONS":
            return await call_next(request)

        # Check authorization header
        auth_header = request.headers.get("Authorization")

        if not auth_header:
            return JSONResponse(
                status_code=401,
                content={"detail": "Missing authorization header"},
                headers={"WWW-Authenticate": "Bearer"},
            )

        # Expected format: "Bearer {password}"
        try:
            scheme, credentials = auth_header.split(" ", 1)
            if scheme.lower() != "bearer":
                raise ValueError("Invalid authentication scheme")
        except ValueError:
            return JSONResponse(
                status_code=401,
                content={"detail": "Invalid authorization header format"},
                headers={"WWW-Authenticate": "Bearer"},
            )

        # Constant-time comparison prevents timing attacks on the password.
        if not hmac.compare_digest(
            credentials.encode("utf-8"), self.password.encode("utf-8")
        ):
            return JSONResponse(
                status_code=401,
                content={"detail": "Invalid password"},
                headers={"WWW-Authenticate": "Bearer"},
            )

        # Password is correct, proceed with the request
        response = await call_next(request)
        return response


# Optional: HTTPBearer security scheme for OpenAPI documentation
security = HTTPBearer(auto_error=False)


def check_api_password(credentials: Optional[HTTPAuthorizationCredentials] = None) -> bool:
    """
    Utility function to check API password.
    Can be used as a dependency in individual routes if needed.

    Returns True when no password is configured or when the supplied bearer
    credential matches; raises HTTPException(401) otherwise.
    """
    password = os.environ.get("OPEN_NOTEBOOK_PASSWORD")

    # No password set, allow access
    if not password:
        return True

    # No credentials provided
    if not credentials:
        raise HTTPException(
            status_code=401,
            detail="Missing authorization",
            headers={"WWW-Authenticate": "Bearer"},
        )

    # Constant-time comparison prevents timing attacks on the password.
    if not hmac.compare_digest(
        credentials.credentials.encode("utf-8"), password.encode("utf-8")
    ):
        raise HTTPException(
            status_code=401,
            detail="Invalid password",
            headers={"WWW-Authenticate": "Bearer"},
        )

    return True
"""
Chat service for API operations.

Thin async HTTP wrapper around the chat endpoints of the Open Notebook API.
"""
import os
from typing import Any, Dict, List, Optional

import httpx
from loguru import logger


class ChatService:
    """Service for chat-related API operations"""

    def __init__(self):
        # Base URL of the backend API; overridable for multi-container setups.
        self.base_url = os.getenv("API_BASE_URL", "http://127.0.0.1:5055")
        # Attach the bearer password header only when authentication is enabled.
        self.headers = {}
        password = os.getenv("OPEN_NOTEBOOK_PASSWORD")
        if password:
            self.headers["Authorization"] = f"Bearer {password}"

    async def _request(
        self,
        method: str,
        path: str,
        *,
        timeout: Optional[httpx.Timeout] = None,
        **kwargs,
    ) -> Any:
        """Issue one HTTP request against the API and return the decoded JSON body."""
        client_kwargs = {} if timeout is None else {"timeout": timeout}
        async with httpx.AsyncClient(**client_kwargs) as client:
            response = await client.request(
                method, f"{self.base_url}{path}", headers=self.headers, **kwargs
            )
            response.raise_for_status()
            return response.json()

    async def get_sessions(self, notebook_id: str) -> List[Dict[str, Any]]:
        """Get all chat sessions for a notebook"""
        try:
            return await self._request(
                "GET", "/api/chat/sessions", params={"notebook_id": notebook_id}
            )
        except Exception as e:
            logger.error(f"Error fetching chat sessions: {str(e)}")
            raise

    async def create_session(
        self,
        notebook_id: str,
        title: Optional[str] = None,
        model_override: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Create a new chat session"""
        try:
            payload: Dict[str, Any] = {"notebook_id": notebook_id}
            if title is not None:
                payload["title"] = title
            if model_override is not None:
                payload["model_override"] = model_override
            return await self._request("POST", "/api/chat/sessions", json=payload)
        except Exception as e:
            logger.error(f"Error creating chat session: {str(e)}")
            raise

    async def get_session(self, session_id: str) -> Dict[str, Any]:
        """Get a specific session with messages"""
        try:
            return await self._request("GET", f"/api/chat/sessions/{session_id}")
        except Exception as e:
            logger.error(f"Error fetching session: {str(e)}")
            raise

    async def update_session(
        self,
        session_id: str,
        title: Optional[str] = None,
        model_override: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Update session properties"""
        try:
            payload: Dict[str, Any] = {}
            if title is not None:
                payload["title"] = title
            if model_override is not None:
                payload["model_override"] = model_override

            # Validation happens inside the try so the failure is logged like
            # any other error, matching the service's error-reporting style.
            if not payload:
                raise ValueError("At least one field must be provided to update a session")

            return await self._request(
                "PUT", f"/api/chat/sessions/{session_id}", json=payload
            )
        except Exception as e:
            logger.error(f"Error updating session: {str(e)}")
            raise

    async def delete_session(self, session_id: str) -> Dict[str, Any]:
        """Delete a chat session"""
        try:
            return await self._request("DELETE", f"/api/chat/sessions/{session_id}")
        except Exception as e:
            logger.error(f"Error deleting session: {str(e)}")
            raise

    async def execute_chat(
        self,
        session_id: str,
        message: str,
        context: Dict[str, Any],
        model_override: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Execute a chat request"""
        try:
            payload: Dict[str, Any] = {
                "session_id": session_id,
                "message": message,
                "context": context,
            }
            if model_override is not None:
                payload["model_override"] = model_override

            # Short connect timeout (10s), long read timeout (10 min) for Ollama/local LLMs
            limits = httpx.Timeout(connect=10.0, read=600.0, write=30.0, pool=10.0)
            return await self._request(
                "POST", "/api/chat/execute", json=payload, timeout=limits
            )
        except Exception as e:
            logger.error(f"Error executing chat: {str(e)}")
            raise

    async def build_context(self, notebook_id: str, context_config: Dict[str, Any]) -> Dict[str, Any]:
        """Build context for a notebook"""
        try:
            payload = {
                "notebook_id": notebook_id,
                "context_config": context_config,
            }
            return await self._request("POST", "/api/chat/context", json=payload)
        except Exception as e:
            logger.error(f"Error building context: {str(e)}")
            raise


# Global instance
chat_service = ChatService()
+""" + +import os +from typing import Any, Dict, List, Optional, Union + +import httpx +from loguru import logger + + +class APIClient: + """Client for Open Notebook API.""" + + def __init__(self, base_url: Optional[str] = None): + self.base_url = base_url or os.getenv("API_BASE_URL", "http://127.0.0.1:5055") + # Timeout increased to 5 minutes (300s) to accommodate slow LLM operations + # (transformations, insights) on slower hardware (Ollama, LM Studio, remote APIs) + # Configurable via API_CLIENT_TIMEOUT environment variable (in seconds) + timeout_str = os.getenv("API_CLIENT_TIMEOUT", "300.0") + try: + timeout_value = float(timeout_str) + # Validate timeout is within reasonable bounds (30s - 3600s / 1 hour) + if timeout_value < 30: + logger.warning(f"API_CLIENT_TIMEOUT={timeout_value}s is too low, using minimum of 30s") + timeout_value = 30.0 + elif timeout_value > 3600: + logger.warning(f"API_CLIENT_TIMEOUT={timeout_value}s is too high, using maximum of 3600s") + timeout_value = 3600.0 + self.timeout = timeout_value + except ValueError: + logger.error(f"Invalid API_CLIENT_TIMEOUT value '{timeout_str}', using default 300s") + self.timeout = 300.0 + + # Add authentication header if password is set + self.headers = {} + password = os.getenv("OPEN_NOTEBOOK_PASSWORD") + if password: + self.headers["Authorization"] = f"Bearer {password}" + + def _make_request( + self, method: str, endpoint: str, timeout: Optional[float] = None, **kwargs + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Make HTTP request to the API.""" + url = f"{self.base_url}{endpoint}" + request_timeout = timeout if timeout is not None else self.timeout + + # Merge headers + headers = kwargs.get("headers", {}) + headers.update(self.headers) + kwargs["headers"] = headers + + try: + with httpx.Client(timeout=request_timeout) as client: + response = client.request(method, url, **kwargs) + response.raise_for_status() + return response.json() + except httpx.RequestError as e: + 
logger.error(f"Request error for {method} {url}: {str(e)}") + raise ConnectionError(f"Failed to connect to API: {str(e)}") + except httpx.HTTPStatusError as e: + logger.error( + f"HTTP error {e.response.status_code} for {method} {url}: {e.response.text}" + ) + raise RuntimeError( + f"API request failed: {e.response.status_code} - {e.response.text}" + ) + except Exception as e: + logger.error(f"Unexpected error for {method} {url}: {str(e)}") + raise + + # Notebooks API methods + def get_notebooks( + self, archived: Optional[bool] = None, order_by: str = "updated desc" + ) -> List[Dict[Any, Any]]: + """Get all notebooks.""" + params: Dict[str, Any] = {"order_by": order_by} + if archived is not None: + params["archived"] = str(archived).lower() + + result = self._make_request("GET", "/api/notebooks", params=params) + return result if isinstance(result, list) else [result] + + def create_notebook(self, name: str, description: str = "") -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Create a new notebook.""" + data = {"name": name, "description": description} + return self._make_request("POST", "/api/notebooks", json=data) + + def get_notebook(self, notebook_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Get a specific notebook.""" + return self._make_request("GET", f"/api/notebooks/{notebook_id}") + + def update_notebook(self, notebook_id: str, **updates) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Update a notebook.""" + return self._make_request("PUT", f"/api/notebooks/{notebook_id}", json=updates) + + def delete_notebook(self, notebook_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Delete a notebook.""" + return self._make_request("DELETE", f"/api/notebooks/{notebook_id}") + + # Search API methods + def search( + self, + query: str, + search_type: str = "text", + limit: int = 100, + search_sources: bool = True, + search_notes: bool = True, + minimum_score: float = 0.2, + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + 
"""Search the knowledge base.""" + data = { + "query": query, + "type": search_type, + "limit": limit, + "search_sources": search_sources, + "search_notes": search_notes, + "minimum_score": minimum_score, + } + return self._make_request("POST", "/api/search", json=data) + + def ask_simple( + self, + question: str, + strategy_model: str, + answer_model: str, + final_answer_model: str, + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Ask the knowledge base a question (simple, non-streaming).""" + data = { + "question": question, + "strategy_model": strategy_model, + "answer_model": answer_model, + "final_answer_model": final_answer_model, + } + # Use configured timeout for long-running ask operations + return self._make_request( + "POST", "/api/search/ask/simple", json=data, timeout=self.timeout + ) + + # Models API methods + def get_models(self, model_type: Optional[str] = None) -> List[Dict[Any, Any]]: + """Get all models with optional type filtering.""" + params = {} + if model_type: + params["type"] = model_type + result = self._make_request("GET", "/api/models", params=params) + return result if isinstance(result, list) else [result] + + def create_model(self, name: str, provider: str, model_type: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Create a new model.""" + data = { + "name": name, + "provider": provider, + "type": model_type, + } + return self._make_request("POST", "/api/models", json=data) + + def delete_model(self, model_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Delete a model.""" + return self._make_request("DELETE", f"/api/models/{model_id}") + + def get_default_models(self) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Get default model assignments.""" + return self._make_request("GET", "/api/models/defaults") + + def update_default_models(self, **defaults) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Update default model assignments.""" + return self._make_request("PUT", 
"/api/models/defaults", json=defaults) + + # Transformations API methods + def get_transformations(self) -> List[Dict[Any, Any]]: + """Get all transformations.""" + result = self._make_request("GET", "/api/transformations") + return result if isinstance(result, list) else [result] + + def create_transformation( + self, + name: str, + title: str, + description: str, + prompt: str, + apply_default: bool = False, + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Create a new transformation.""" + data = { + "name": name, + "title": title, + "description": description, + "prompt": prompt, + "apply_default": apply_default, + } + return self._make_request("POST", "/api/transformations", json=data) + + def get_transformation(self, transformation_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Get a specific transformation.""" + return self._make_request("GET", f"/api/transformations/{transformation_id}") + + def update_transformation(self, transformation_id: str, **updates) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Update a transformation.""" + return self._make_request( + "PUT", f"/api/transformations/{transformation_id}", json=updates + ) + + def delete_transformation(self, transformation_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Delete a transformation.""" + return self._make_request("DELETE", f"/api/transformations/{transformation_id}") + + def execute_transformation( + self, transformation_id: str, input_text: str, model_id: str + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Execute a transformation on input text.""" + data = { + "transformation_id": transformation_id, + "input_text": input_text, + "model_id": model_id, + } + # Use configured timeout for transformation operations + return self._make_request( + "POST", "/api/transformations/execute", json=data, timeout=self.timeout + ) + + # Notes API methods + def get_notes(self, notebook_id: Optional[str] = None) -> List[Dict[Any, Any]]: + """Get all notes 
with optional notebook filtering.""" + params = {} + if notebook_id: + params["notebook_id"] = notebook_id + result = self._make_request("GET", "/api/notes", params=params) + return result if isinstance(result, list) else [result] + + def create_note( + self, + content: str, + title: Optional[str] = None, + note_type: str = "human", + notebook_id: Optional[str] = None, + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Create a new note.""" + data = { + "content": content, + "note_type": note_type, + } + if title: + data["title"] = title + if notebook_id: + data["notebook_id"] = notebook_id + return self._make_request("POST", "/api/notes", json=data) + + def get_note(self, note_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Get a specific note.""" + return self._make_request("GET", f"/api/notes/{note_id}") + + def update_note(self, note_id: str, **updates) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Update a note.""" + return self._make_request("PUT", f"/api/notes/{note_id}", json=updates) + + def delete_note(self, note_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Delete a note.""" + return self._make_request("DELETE", f"/api/notes/{note_id}") + + # Embedding API methods + def embed_content(self, item_id: str, item_type: str, async_processing: bool = False) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Embed content for vector search.""" + data = { + "item_id": item_id, + "item_type": item_type, + "async_processing": async_processing, + } + # Use configured timeout for embedding operations + return self._make_request("POST", "/api/embed", json=data, timeout=self.timeout) + + def rebuild_embeddings( + self, + mode: str = "existing", + include_sources: bool = True, + include_notes: bool = True, + include_insights: bool = True + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Rebuild embeddings in bulk. + + Note: This operation can take a long time for large databases. 
+ Consider increasing API_CLIENT_TIMEOUT to 600-900s for bulk rebuilds. + """ + data = { + "mode": mode, + "include_sources": include_sources, + "include_notes": include_notes, + "include_insights": include_insights, + } + # Use double the configured timeout for bulk rebuild operations (or configured value if already high) + rebuild_timeout = max(self.timeout, min(self.timeout * 2, 3600.0)) + return self._make_request("POST", "/api/embeddings/rebuild", json=data, timeout=rebuild_timeout) + + def get_rebuild_status(self, command_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Get status of a rebuild operation.""" + return self._make_request("GET", f"/api/embeddings/rebuild/{command_id}/status") + + # Settings API methods + def get_settings(self) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Get all application settings.""" + return self._make_request("GET", "/api/settings") + + def update_settings(self, **settings) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Update application settings.""" + return self._make_request("PUT", "/api/settings", json=settings) + + # Context API methods + def get_notebook_context( + self, notebook_id: str, context_config: Optional[Dict] = None + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Get context for a notebook.""" + data: Dict[str, Any] = {"notebook_id": notebook_id} + if context_config: + data["context_config"] = context_config + result = self._make_request( + "POST", f"/api/notebooks/{notebook_id}/context", json=data + ) + return result if isinstance(result, dict) else {} + + # Sources API methods + def get_sources(self, notebook_id: Optional[str] = None) -> List[Dict[Any, Any]]: + """Get all sources with optional notebook filtering.""" + params = {} + if notebook_id: + params["notebook_id"] = notebook_id + result = self._make_request("GET", "/api/sources", params=params) + return result if isinstance(result, list) else [result] + + def create_source( + self, + notebook_id: Optional[str] = 
None, + notebooks: Optional[List[str]] = None, + source_type: str = "text", + url: Optional[str] = None, + file_path: Optional[str] = None, + content: Optional[str] = None, + title: Optional[str] = None, + transformations: Optional[List[str]] = None, + embed: bool = False, + delete_source: bool = False, + async_processing: bool = False, + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Create a new source.""" + data = { + "type": source_type, + "embed": embed, + "delete_source": delete_source, + "async_processing": async_processing, + } + + # Handle backward compatibility for notebook_id vs notebooks + if notebooks: + data["notebooks"] = notebooks + elif notebook_id: + data["notebook_id"] = notebook_id + else: + raise ValueError("Either notebook_id or notebooks must be provided") + + if url: + data["url"] = url + if file_path: + data["file_path"] = file_path + if content: + data["content"] = content + if title: + data["title"] = title + if transformations: + data["transformations"] = transformations + + # Use configured timeout for source creation (especially PDF processing with OCR) + return self._make_request("POST", "/api/sources/json", json=data, timeout=self.timeout) + + def get_source(self, source_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Get a specific source.""" + return self._make_request("GET", f"/api/sources/{source_id}") + + def get_source_status(self, source_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Get processing status for a source.""" + return self._make_request("GET", f"/api/sources/{source_id}/status") + + def update_source(self, source_id: str, **updates) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Update a source.""" + return self._make_request("PUT", f"/api/sources/{source_id}", json=updates) + + def delete_source(self, source_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Delete a source.""" + return self._make_request("DELETE", f"/api/sources/{source_id}") + + # Insights 
API methods + def get_source_insights(self, source_id: str) -> List[Dict[Any, Any]]: + """Get all insights for a specific source.""" + result = self._make_request("GET", f"/api/sources/{source_id}/insights") + return result if isinstance(result, list) else [result] + + def get_insight(self, insight_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Get a specific insight.""" + return self._make_request("GET", f"/api/insights/{insight_id}") + + def delete_insight(self, insight_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Delete a specific insight.""" + return self._make_request("DELETE", f"/api/insights/{insight_id}") + + def save_insight_as_note( + self, insight_id: str, notebook_id: Optional[str] = None + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Convert an insight to a note.""" + data = {} + if notebook_id: + data["notebook_id"] = notebook_id + return self._make_request( + "POST", f"/api/insights/{insight_id}/save-as-note", json=data + ) + + def create_source_insight( + self, source_id: str, transformation_id: str, model_id: Optional[str] = None + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Create a new insight for a source by running a transformation.""" + data = {"transformation_id": transformation_id} + if model_id: + data["model_id"] = model_id + return self._make_request( + "POST", f"/api/sources/{source_id}/insights", json=data + ) + + # Episode Profiles API methods + def get_episode_profiles(self) -> List[Dict[Any, Any]]: + """Get all episode profiles.""" + result = self._make_request("GET", "/api/episode-profiles") + return result if isinstance(result, list) else [result] + + def get_episode_profile(self, profile_name: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Get a specific episode profile by name.""" + return self._make_request("GET", f"/api/episode-profiles/{profile_name}") + + def create_episode_profile( + self, + name: str, + description: str = "", + speaker_config: str = "", + 
outline_provider: str = "", + outline_model: str = "", + transcript_provider: str = "", + transcript_model: str = "", + default_briefing: str = "", + num_segments: int = 5, + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Create a new episode profile.""" + data = { + "name": name, + "description": description, + "speaker_config": speaker_config, + "outline_provider": outline_provider, + "outline_model": outline_model, + "transcript_provider": transcript_provider, + "transcript_model": transcript_model, + "default_briefing": default_briefing, + "num_segments": num_segments, + } + return self._make_request("POST", "/api/episode-profiles", json=data) + + def update_episode_profile(self, profile_id: str, **updates) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Update an episode profile.""" + return self._make_request("PUT", f"/api/episode-profiles/{profile_id}", json=updates) + + def delete_episode_profile(self, profile_id: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Delete an episode profile.""" + return self._make_request("DELETE", f"/api/episode-profiles/{profile_id}") + + +# Global client instance +api_client = APIClient() diff --git a/api/command_service.py b/api/command_service.py new file mode 100644 index 0000000000000000000000000000000000000000..6e9506ba21adb477af4f53e7dace797720ee2308 --- /dev/null +++ b/api/command_service.py @@ -0,0 +1,92 @@ +from typing import Any, Dict, List, Optional + +from loguru import logger +from surreal_commands import get_command_status, submit_command + + +class CommandService: + """Generic service layer for command operations""" + + @staticmethod + async def submit_command_job( + module_name: str, # Actually app_name for surreal-commands + command_name: str, + command_args: Dict[str, Any], + context: Optional[Dict[str, Any]] = None, + ) -> str: + """Submit a generic command job for background processing""" + try: + # Ensure command modules are imported before submitting + # This is needed because 
submit_command validates against local registry + try: + import commands.podcast_commands # noqa: F401 + except ImportError as import_err: + logger.error(f"Failed to import command modules: {import_err}") + raise ValueError("Command modules not available") + + # surreal-commands expects: submit_command(app_name, command_name, args) + cmd_id = submit_command( + module_name, # This is actually the app name (e.g., "open_notebook") + command_name, # Command name (e.g., "process_text") + command_args, # Input data + ) + # Convert RecordID to string if needed + if not cmd_id: + raise ValueError("Failed to get cmd_id from submit_command") + cmd_id_str = str(cmd_id) + logger.info( + f"Submitted command job: {cmd_id_str} for {module_name}.{command_name}" + ) + return cmd_id_str + + except Exception as e: + logger.error(f"Failed to submit command job: {e}") + raise + + @staticmethod + async def get_command_status(job_id: str) -> Dict[str, Any]: + """Get status of any command job""" + try: + status = await get_command_status(job_id) + return { + "job_id": job_id, + "status": status.status if status else "unknown", + "result": status.result if status else None, + "error_message": getattr(status, "error_message", None) + if status + else None, + "created": str(status.created) + if status and hasattr(status, "created") and status.created + else None, + "updated": str(status.updated) + if status and hasattr(status, "updated") and status.updated + else None, + "progress": getattr(status, "progress", None) if status else None, + } + except Exception as e: + logger.error(f"Failed to get command status: {e}") + raise + + @staticmethod + async def list_command_jobs( + module_filter: Optional[str] = None, + command_filter: Optional[str] = None, + status_filter: Optional[str] = None, + limit: int = 50, + ) -> List[Dict[str, Any]]: + """List command jobs with optional filtering""" + # This will be implemented with proper SurrealDB queries + # For now, return empty list as this is 
foundation phase + return [] + + @staticmethod + async def cancel_command_job(job_id: str) -> bool: + """Cancel a running command job""" + try: + # Implementation depends on surreal-commands cancellation support + # For now, just log the attempt + logger.info(f"Attempting to cancel job: {job_id}") + return True + except Exception as e: + logger.error(f"Failed to cancel command job: {e}") + raise diff --git a/api/context_service.py b/api/context_service.py new file mode 100644 index 0000000000000000000000000000000000000000..3f6f63fa5145ec558765270b3c18aeb92b1eed2a --- /dev/null +++ b/api/context_service.py @@ -0,0 +1,32 @@ +""" +Context service layer using API. +""" + +from typing import Any, Dict, List, Optional, Union + +from loguru import logger + +from api.client import api_client + + +class ContextService: + """Service layer for context operations using API.""" + + def __init__(self): + logger.info("Using API for context operations") + + def get_notebook_context( + self, + notebook_id: str, + context_config: Optional[Dict] = None + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Get context for a notebook.""" + result = api_client.get_notebook_context( + notebook_id=notebook_id, + context_config=context_config + ) + return result + + +# Global service instance +context_service = ContextService() \ No newline at end of file diff --git a/api/embedding_service.py b/api/embedding_service.py new file mode 100644 index 0000000000000000000000000000000000000000..b3d4d8ecbed006b1ac15f37064747cad543742f2 --- /dev/null +++ b/api/embedding_service.py @@ -0,0 +1,25 @@ +""" +Embedding service layer using API. 
+""" + +from typing import Any, Dict, List, Union + +from loguru import logger + +from api.client import api_client + + +class EmbeddingService: + """Service layer for embedding operations using API.""" + + def __init__(self): + logger.info("Using API for embedding operations") + + def embed_content(self, item_id: str, item_type: str) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Embed content for vector search.""" + result = api_client.embed_content(item_id=item_id, item_type=item_type) + return result + + +# Global service instance +embedding_service = EmbeddingService() \ No newline at end of file diff --git a/api/episode_profiles_service.py b/api/episode_profiles_service.py new file mode 100644 index 0000000000000000000000000000000000000000..420690efc71d0d355914c21499d61466c5a8db62 --- /dev/null +++ b/api/episode_profiles_service.py @@ -0,0 +1,104 @@ +""" +Episode profiles service layer using API. +""" + +from typing import List + +from loguru import logger + +from api.client import api_client +from open_notebook.domain.podcast import EpisodeProfile + + +class EpisodeProfilesService: + """Service layer for episode profiles operations using API.""" + + def __init__(self): + logger.info("Using API for episode profiles operations") + + def get_all_episode_profiles(self) -> List[EpisodeProfile]: + """Get all episode profiles.""" + profiles_data = api_client.get_episode_profiles() + # Convert API response to EpisodeProfile objects + profiles = [] + for profile_data in profiles_data: + profile = EpisodeProfile( + name=profile_data["name"], + description=profile_data.get("description", ""), + speaker_config=profile_data["speaker_config"], + outline_provider=profile_data["outline_provider"], + outline_model=profile_data["outline_model"], + transcript_provider=profile_data["transcript_provider"], + transcript_model=profile_data["transcript_model"], + default_briefing=profile_data["default_briefing"], + num_segments=profile_data["num_segments"] + ) + profile.id = 
profile_data["id"] + profiles.append(profile) + return profiles + + def get_episode_profile(self, profile_name: str) -> EpisodeProfile: + """Get a specific episode profile by name.""" + profile_response = api_client.get_episode_profile(profile_name) + profile_data = profile_response if isinstance(profile_response, dict) else profile_response[0] + profile = EpisodeProfile( + name=profile_data["name"], + description=profile_data.get("description", ""), + speaker_config=profile_data["speaker_config"], + outline_provider=profile_data["outline_provider"], + outline_model=profile_data["outline_model"], + transcript_provider=profile_data["transcript_provider"], + transcript_model=profile_data["transcript_model"], + default_briefing=profile_data["default_briefing"], + num_segments=profile_data["num_segments"] + ) + profile.id = profile_data["id"] + return profile + + def create_episode_profile( + self, + name: str, + description: str = "", + speaker_config: str = "", + outline_provider: str = "", + outline_model: str = "", + transcript_provider: str = "", + transcript_model: str = "", + default_briefing: str = "", + num_segments: int = 5, + ) -> EpisodeProfile: + """Create a new episode profile.""" + profile_response = api_client.create_episode_profile( + name=name, + description=description, + speaker_config=speaker_config, + outline_provider=outline_provider, + outline_model=outline_model, + transcript_provider=transcript_provider, + transcript_model=transcript_model, + default_briefing=default_briefing, + num_segments=num_segments, + ) + profile_data = profile_response if isinstance(profile_response, dict) else profile_response[0] + profile = EpisodeProfile( + name=profile_data["name"], + description=profile_data.get("description", ""), + speaker_config=profile_data["speaker_config"], + outline_provider=profile_data["outline_provider"], + outline_model=profile_data["outline_model"], + transcript_provider=profile_data["transcript_provider"], + 
transcript_model=profile_data["transcript_model"], + default_briefing=profile_data["default_briefing"], + num_segments=profile_data["num_segments"] + ) + profile.id = profile_data["id"] + return profile + + def delete_episode_profile(self, profile_id: str) -> bool: + """Delete an episode profile.""" + api_client.delete_episode_profile(profile_id) + return True + + +# Global service instance +episode_profiles_service = EpisodeProfilesService() \ No newline at end of file diff --git a/api/insights_service.py b/api/insights_service.py new file mode 100644 index 0000000000000000000000000000000000000000..b435519e55b966f98cb38c85d614b62b045c27cf --- /dev/null +++ b/api/insights_service.py @@ -0,0 +1,84 @@ +""" +Insights service layer using API. +""" + +from typing import List, Optional + +from loguru import logger + +from api.client import api_client +from open_notebook.domain.notebook import Note, SourceInsight + + +class InsightsService: + """Service layer for insights operations using API.""" + + def __init__(self): + logger.info("Using API for insights operations") + + def get_source_insights(self, source_id: str) -> List[SourceInsight]: + """Get all insights for a specific source.""" + insights_data = api_client.get_source_insights(source_id) + # Convert API response to SourceInsight objects + insights = [] + for insight_data in insights_data: + insight = SourceInsight( + insight_type=insight_data["insight_type"], + content=insight_data["content"], + ) + insight.id = insight_data["id"] + insight.created = insight_data["created"] + insight.updated = insight_data["updated"] + insights.append(insight) + return insights + + def get_insight(self, insight_id: str) -> SourceInsight: + """Get a specific insight.""" + insight_response = api_client.get_insight(insight_id) + insight_data = insight_response if isinstance(insight_response, dict) else insight_response[0] + insight = SourceInsight( + insight_type=insight_data["insight_type"], + content=insight_data["content"], + ) 
+ insight.id = insight_data["id"] + insight.created = insight_data["created"] + insight.updated = insight_data["updated"] + # Note: source_id from API response is not stored; use await insight.get_source() if needed + return insight + + def delete_insight(self, insight_id: str) -> bool: + """Delete a specific insight.""" + api_client.delete_insight(insight_id) + return True + + def save_insight_as_note(self, insight_id: str, notebook_id: Optional[str] = None) -> Note: + """Convert an insight to a note.""" + note_response = api_client.save_insight_as_note(insight_id, notebook_id) + note_data = note_response if isinstance(note_response, dict) else note_response[0] + note = Note( + title=note_data["title"], + content=note_data["content"], + note_type=note_data["note_type"], + ) + note.id = note_data["id"] + note.created = note_data["created"] + note.updated = note_data["updated"] + return note + + def create_source_insight(self, source_id: str, transformation_id: str, model_id: Optional[str] = None) -> SourceInsight: + """Create a new insight for a source by running a transformation.""" + insight_response = api_client.create_source_insight(source_id, transformation_id, model_id) + insight_data = insight_response if isinstance(insight_response, dict) else insight_response[0] + insight = SourceInsight( + insight_type=insight_data["insight_type"], + content=insight_data["content"], + ) + insight.id = insight_data["id"] + insight.created = insight_data["created"] + insight.updated = insight_data["updated"] + # Note: source_id from API response is not stored; use await insight.get_source() if needed + return insight + + +# Global service instance +insights_service = InsightsService() \ No newline at end of file diff --git a/api/main.py b/api/main.py new file mode 100644 index 0000000000000000000000000000000000000000..e3bba7ee07bbfeaf9c85630c94705e1c66603d26 --- /dev/null +++ b/api/main.py @@ -0,0 +1,151 @@ +# Load environment variables +from dotenv import load_dotenv 
+load_dotenv() + +from contextlib import asynccontextmanager + +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from loguru import logger + +from api.auth import PasswordAuthMiddleware +from api.routers import ( + auth, + chat, + config, + context, + embedding, + embedding_rebuild, + episode_profiles, + insights, + knowledge_graph, + models, + monitoring, + notebooks, + notes, + ocr, + podcasts, + quiz, + research, + search, + settings, + source_chat, + sources, + speaker_profiles, + study_plans, + transformations, + diagrams, +) +from api.routers import commands as commands_router +from open_notebook.database.async_migrate import AsyncMigrationManager + +# Import commands to register them in the API process +try: + + logger.info("Commands imported in API process") +except Exception as e: + logger.error(f"Failed to import commands in API process: {e}") + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """ + Lifespan event handler for the FastAPI application. + Runs database migrations automatically on startup. + """ + # Startup: Run database migrations + logger.info("Starting API initialization...") + + try: + migration_manager = AsyncMigrationManager() + current_version = await migration_manager.get_current_version() + logger.info(f"Current database version: {current_version}") + + if await migration_manager.needs_migration(): + logger.warning("Database migrations are pending. Running migrations...") + await migration_manager.run_migration_up() + new_version = await migration_manager.get_current_version() + logger.success(f"Migrations completed successfully. Database is now at version {new_version}") + else: + logger.info("Database is already at the latest version. 
No migrations needed.") + except Exception as e: + logger.error(f"CRITICAL: Database migration failed: {str(e)}") + logger.exception(e) + # Fail fast - don't start the API with an outdated database schema + raise RuntimeError(f"Failed to run database migrations: {str(e)}") from e + + logger.success("API initialization completed successfully") + + # Yield control to the application + yield + + # Shutdown: cleanup if needed + logger.info("API shutdown complete") + + +app = FastAPI( + title="Open Notebook API", + description="API for Open Notebook - Research Assistant", + version="0.2.2", + lifespan=lifespan, +) + +# Add password authentication middleware first +# Exclude /api/auth/status and /api/config from authentication +app.add_middleware(PasswordAuthMiddleware, excluded_paths=["/", "/health", "/docs", "/openapi.json", "/redoc", "/api/auth/status", "/api/config"]) + +# Add CORS middleware last (so it processes first) +# Allow requests from: +# - localhost development (http://localhost:3000) +# - Hugging Face Space backend (https://baveshraam-open-notebook.hf.space) +# - Any frontend deployment (can be restricted further in production) +# SECURITY(review): "*" below combined with allow_credentials=True makes Starlette +# echo back ANY request Origin with credentials allowed, so any website can issue +# authenticated cross-origin requests on behalf of a logged-in user. Remove "*" +# (keep only the explicit origins) before exposing this API publicly. +app.add_middleware( + CORSMiddleware, + allow_origins=[ + "http://localhost:3000", + "http://127.0.0.1:3000", + "https://baveshraam-open-notebook.hf.space", + "*" # Allow all origins - can be restricted later + ], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Include routers +app.include_router(auth.router, prefix="/api", tags=["auth"]) +app.include_router(config.router, prefix="/api", tags=["config"]) +app.include_router(notebooks.router, prefix="/api", tags=["notebooks"]) +app.include_router(search.router, prefix="/api", tags=["search"]) +app.include_router(models.router, prefix="/api", tags=["models"]) +app.include_router(transformations.router, prefix="/api", tags=["transformations"]) +app.include_router(notes.router, prefix="/api", tags=["notes"]) +app.include_router(embedding.router, 
prefix="/api", tags=["embedding"]) +app.include_router(embedding_rebuild.router, prefix="/api/embeddings", tags=["embeddings"]) +app.include_router(settings.router, prefix="/api", tags=["settings"]) +app.include_router(context.router, prefix="/api", tags=["context"]) +app.include_router(sources.router, prefix="/api", tags=["sources"]) +app.include_router(insights.router, prefix="/api", tags=["insights"]) +app.include_router(commands_router.router, prefix="/api", tags=["commands"]) +app.include_router(podcasts.router, prefix="/api", tags=["podcasts"]) +app.include_router(episode_profiles.router, prefix="/api", tags=["episode-profiles"]) +app.include_router(speaker_profiles.router, prefix="/api", tags=["speaker-profiles"]) +app.include_router(chat.router, prefix="/api", tags=["chat"]) +app.include_router(source_chat.router, prefix="/api", tags=["source-chat"]) +app.include_router(quiz.router, prefix="/api", tags=["quiz"]) +app.include_router(research.router, prefix="/api", tags=["research"]) +app.include_router(knowledge_graph.router, prefix="/api", tags=["knowledge-graph"]) +app.include_router(monitoring.router, prefix="/api", tags=["monitoring"]) +app.include_router(ocr.router, prefix="/api", tags=["ocr"]) +app.include_router(study_plans.router, prefix="/api", tags=["study-plans"]) +app.include_router(diagrams.router, prefix="/api", tags=["diagrams"]) + + +@app.get("/") +async def root(): + return {"message": "Open Notebook API is running"} + + +@app.get("/health") +async def health(): + return {"status": "healthy"} diff --git a/api/models.py b/api/models.py new file mode 100644 index 0000000000000000000000000000000000000000..5d382d37d7a5514e22e11d11373803722b194ae1 --- /dev/null +++ b/api/models.py @@ -0,0 +1,430 @@ +from typing import Any, Dict, List, Literal, Optional + +from pydantic import BaseModel, ConfigDict, Field, model_validator + + +# Notebook models +class NotebookCreate(BaseModel): + name: str = Field(..., description="Name of the notebook") + 
description: str = Field(default="", description="Description of the notebook") + + +class NotebookUpdate(BaseModel): + name: Optional[str] = Field(None, description="Name of the notebook") + description: Optional[str] = Field(None, description="Description of the notebook") + archived: Optional[bool] = Field( + None, description="Whether the notebook is archived" + ) + + +class NotebookResponse(BaseModel): + id: str + name: str + description: str + archived: bool + created: str + updated: str + source_count: int + note_count: int + + +# Search models +class SearchRequest(BaseModel): + query: str = Field(..., description="Search query") + type: Literal["text", "vector"] = Field("text", description="Search type") + limit: int = Field(100, description="Maximum number of results", le=1000) + search_sources: bool = Field(True, description="Include sources in search") + search_notes: bool = Field(True, description="Include notes in search") + minimum_score: float = Field( + 0.2, description="Minimum score for vector search", ge=0, le=1 + ) + + +class SearchResponse(BaseModel): + results: List[Dict[str, Any]] = Field(..., description="Search results") + total_count: int = Field(..., description="Total number of results") + search_type: str = Field(..., description="Type of search performed") + + +class AskRequest(BaseModel): + question: str = Field(..., description="Question to ask the knowledge base") + strategy_model: str = Field(..., description="Model ID for query strategy") + answer_model: str = Field(..., description="Model ID for individual answers") + final_answer_model: str = Field(..., description="Model ID for final answer") + + +class DirectAskRequest(BaseModel): + """Request for direct AI queries (without RAG)""" + question: str = Field(..., description="Question to ask AI") + model_id: Optional[str] = Field(None, description="Model ID to use (optional)") + + +class AskResponse(BaseModel): + answer: str = Field(..., description="Final answer from the 
knowledge base") + question: str = Field(..., description="Original question") + + +# Models API models +class ModelCreate(BaseModel): + name: str = Field(..., description="Model name (e.g., gpt-5-mini, claude, gemini)") + provider: str = Field( + ..., description="Provider name (e.g., openai, anthropic, gemini)" + ) + type: str = Field( + ..., + description="Model type (language, embedding, text_to_speech, speech_to_text)", + ) + + +class ModelResponse(BaseModel): + id: str + name: str + provider: str + type: str + created: str + updated: str + + +class DefaultModelsResponse(BaseModel): + default_chat_model: Optional[str] = None + default_transformation_model: Optional[str] = None + large_context_model: Optional[str] = None + default_text_to_speech_model: Optional[str] = None + default_speech_to_text_model: Optional[str] = None + default_embedding_model: Optional[str] = None + default_tools_model: Optional[str] = None + + +class ProviderAvailabilityResponse(BaseModel): + available: List[str] = Field(..., description="List of available providers") + unavailable: List[str] = Field(..., description="List of unavailable providers") + supported_types: Dict[str, List[str]] = Field( + ..., description="Provider to supported model types mapping" + ) + + +# Transformations API models +class TransformationCreate(BaseModel): + name: str = Field(..., description="Transformation name") + title: str = Field(..., description="Display title for the transformation") + description: str = Field( + ..., description="Description of what this transformation does" + ) + prompt: str = Field(..., description="The transformation prompt") + apply_default: bool = Field( + False, description="Whether to apply this transformation by default" + ) + + +class TransformationUpdate(BaseModel): + name: Optional[str] = Field(None, description="Transformation name") + title: Optional[str] = Field( + None, description="Display title for the transformation" + ) + description: Optional[str] = Field( + 
None, description="Description of what this transformation does" + ) + prompt: Optional[str] = Field(None, description="The transformation prompt") + apply_default: Optional[bool] = Field( + None, description="Whether to apply this transformation by default" + ) + + +class TransformationResponse(BaseModel): + id: str + name: str + title: str + description: str + prompt: str + apply_default: bool + created: str + updated: str + + +class TransformationExecuteRequest(BaseModel): + model_config = ConfigDict(protected_namespaces=()) + + transformation_id: str = Field( + ..., description="ID of the transformation to execute" + ) + input_text: str = Field(..., description="Text to transform") + model_id: str = Field(..., description="Model ID to use for the transformation") + + +class TransformationExecuteResponse(BaseModel): + model_config = ConfigDict(protected_namespaces=()) + + output: str = Field(..., description="Transformed text") + transformation_id: str = Field(..., description="ID of the transformation used") + model_id: str = Field(..., description="Model ID used") + + +# Default Prompt API models +class DefaultPromptResponse(BaseModel): + transformation_instructions: str = Field( + ..., description="Default transformation instructions" + ) + + +class DefaultPromptUpdate(BaseModel): + transformation_instructions: str = Field( + ..., description="Default transformation instructions" + ) + + +# Notes API models +class NoteCreate(BaseModel): + title: Optional[str] = Field(None, description="Note title") + content: str = Field(..., description="Note content") + note_type: Optional[str] = Field("human", description="Type of note (human, ai)") + notebook_id: Optional[str] = Field( + None, description="Notebook ID to add the note to" + ) + + +class NoteUpdate(BaseModel): + title: Optional[str] = Field(None, description="Note title") + content: Optional[str] = Field(None, description="Note content") + note_type: Optional[str] = Field(None, description="Type of note 
(human, ai)") + + +class NoteResponse(BaseModel): + id: str + title: Optional[str] + content: Optional[str] + note_type: Optional[str] + created: str + updated: str + + +# Embedding API models +class EmbedRequest(BaseModel): + item_id: str = Field(..., description="ID of the item to embed") + item_type: str = Field(..., description="Type of item (source, note)") + async_processing: bool = Field( + False, description="Process asynchronously in background" + ) + + +class EmbedResponse(BaseModel): + success: bool = Field(..., description="Whether embedding was successful") + message: str = Field(..., description="Result message") + item_id: str = Field(..., description="ID of the item that was embedded") + item_type: str = Field(..., description="Type of item that was embedded") + command_id: Optional[str] = Field( + None, description="Command ID for async processing" + ) + + +# Rebuild request/response models +class RebuildRequest(BaseModel): + mode: Literal["existing", "all"] = Field( + ..., + description="Rebuild mode: 'existing' only re-embeds items with embeddings, 'all' embeds everything", + ) + include_sources: bool = Field(True, description="Include sources in rebuild") + include_notes: bool = Field(True, description="Include notes in rebuild") + include_insights: bool = Field(True, description="Include insights in rebuild") + + +class RebuildResponse(BaseModel): + command_id: str = Field(..., description="Command ID to track progress") + total_items: int = Field(..., description="Estimated number of items to process") + message: str = Field(..., description="Status message") + + +class RebuildProgress(BaseModel): + processed: int = Field(..., description="Number of items processed") + total: int = Field(..., description="Total items to process") + percentage: float = Field(..., description="Progress percentage") + + +class RebuildStats(BaseModel): + sources: int = Field(0, description="Sources processed") + notes: int = Field(0, description="Notes processed") 
+ insights: int = Field(0, description="Insights processed") + failed: int = Field(0, description="Failed items") + + +class RebuildStatusResponse(BaseModel): + command_id: str = Field(..., description="Command ID") + status: str = Field(..., description="Status: queued, running, completed, failed") + progress: Optional[RebuildProgress] = None + stats: Optional[RebuildStats] = None + started_at: Optional[str] = None + completed_at: Optional[str] = None + error_message: Optional[str] = None + + +# Settings API models +class SettingsResponse(BaseModel): + default_content_processing_engine_doc: Optional[str] = None + default_content_processing_engine_url: Optional[str] = None + default_embedding_option: Optional[str] = None + auto_delete_files: Optional[str] = None + youtube_preferred_languages: Optional[List[str]] = None + + +class SettingsUpdate(BaseModel): + default_content_processing_engine_doc: Optional[str] = None + default_content_processing_engine_url: Optional[str] = None + default_embedding_option: Optional[str] = None + auto_delete_files: Optional[str] = None + youtube_preferred_languages: Optional[List[str]] = None + + +# Sources API models +class AssetModel(BaseModel): + file_path: Optional[str] = None + url: Optional[str] = None + + +class SourceCreate(BaseModel): + # Backward compatibility: support old single notebook_id + notebook_id: Optional[str] = Field( + None, description="Notebook ID to add the source to (deprecated, use notebooks)" + ) + # New multi-notebook support + notebooks: Optional[List[str]] = Field( + None, description="List of notebook IDs to add the source to" + ) + # Required fields + type: str = Field(..., description="Source type: link, upload, or text") + url: Optional[str] = Field(None, description="URL for link type") + file_path: Optional[str] = Field(None, description="File path for upload type") + content: Optional[str] = Field(None, description="Text content for text type") + title: Optional[str] = Field(None, 
description="Source title") + transformations: Optional[List[str]] = Field( + default_factory=list, description="Transformation IDs to apply" + ) + embed: bool = Field(False, description="Whether to embed content for vector search") + delete_source: bool = Field( + False, description="Whether to delete uploaded file after processing" + ) + # New async processing support + async_processing: bool = Field( + False, description="Whether to process source asynchronously" + ) + + @model_validator(mode="after") + def validate_notebook_fields(self): + # Ensure only one of notebook_id or notebooks is provided + if self.notebook_id is not None and self.notebooks is not None: + raise ValueError( + "Cannot specify both 'notebook_id' and 'notebooks'. Use 'notebooks' for multi-notebook support." + ) + + # Convert single notebook_id to notebooks array for internal processing + if self.notebook_id is not None: + self.notebooks = [self.notebook_id] + # Keep notebook_id for backward compatibility in response + + # Set empty array if no notebooks specified (allow sources without notebooks) + if self.notebooks is None: + self.notebooks = [] + + return self + + +class SourceUpdate(BaseModel): + title: Optional[str] = Field(None, description="Source title") + topics: Optional[List[str]] = Field(None, description="Source topics") + + +class SourceResponse(BaseModel): + id: str + title: Optional[str] + topics: Optional[List[str]] + asset: Optional[AssetModel] + full_text: Optional[str] + embedded: bool + embedded_chunks: int + file_available: Optional[bool] = None + created: str + updated: str + # New fields for async processing + command_id: Optional[str] = None + status: Optional[str] = None + processing_info: Optional[Dict] = None + # Notebook associations + notebooks: Optional[List[str]] = None + + +class SourceListResponse(BaseModel): + id: str + title: Optional[str] + topics: Optional[List[str]] + asset: Optional[AssetModel] + embedded: bool # Boolean flag indicating if source has 
embeddings + embedded_chunks: int # Number of embedded chunks + insights_count: int + created: str + updated: str + file_available: Optional[bool] = None + # Status fields for async processing + command_id: Optional[str] = None + status: Optional[str] = None + processing_info: Optional[Dict[str, Any]] = None + + +# Context API models +class ContextConfig(BaseModel): + sources: Dict[str, str] = Field( + default_factory=dict, description="Source inclusion config {source_id: level}" + ) + notes: Dict[str, str] = Field( + default_factory=dict, description="Note inclusion config {note_id: level}" + ) + + +class ContextRequest(BaseModel): + notebook_id: str = Field(..., description="Notebook ID to get context for") + context_config: Optional[ContextConfig] = Field( + None, description="Context configuration" + ) + + +class ContextResponse(BaseModel): + notebook_id: str + sources: List[Dict[str, Any]] = Field(..., description="Source context data") + notes: List[Dict[str, Any]] = Field(..., description="Note context data") + total_tokens: Optional[int] = Field(None, description="Estimated token count") + + +# Insights API models +class SourceInsightResponse(BaseModel): + id: str + source_id: str + insight_type: str + content: str + created: str + updated: str + + +class SaveAsNoteRequest(BaseModel): + notebook_id: Optional[str] = Field(None, description="Notebook ID to add note to") + + +class CreateSourceInsightRequest(BaseModel): + model_config = ConfigDict(protected_namespaces=()) + + transformation_id: str = Field(..., description="ID of transformation to apply") + model_id: Optional[str] = Field( + None, description="Model ID (uses default if not provided)" + ) + + +# Source status response +class SourceStatusResponse(BaseModel): + status: Optional[str] = Field(None, description="Processing status") + message: str = Field(..., description="Descriptive message about the status") + processing_info: Optional[Dict[str, Any]] = Field( + None, description="Detailed 
processing information" + ) + command_id: Optional[str] = Field(None, description="Command ID if available") + + +# Error response +class ErrorResponse(BaseModel): + error: str + message: str diff --git a/api/models_service.py b/api/models_service.py new file mode 100644 index 0000000000000000000000000000000000000000..8196c610b41abab47ea1b6e7018efddeec69bb66 --- /dev/null +++ b/api/models_service.py @@ -0,0 +1,100 @@ +""" +Models service layer using API. +""" + +from typing import List, Optional + +from loguru import logger + +from api.client import api_client +from open_notebook.domain.models import DefaultModels, Model + + +class ModelsService: + """Service layer for models operations using API.""" + + def __init__(self): + logger.info("Using API for models operations") + + def get_all_models(self, model_type: Optional[str] = None) -> List[Model]: + """Get all models with optional type filtering.""" + models_data = api_client.get_models(model_type=model_type) + # Convert API response to Model objects + models = [] + for model_data in models_data: + model = Model( + name=model_data["name"], + provider=model_data["provider"], + type=model_data["type"], + ) + model.id = model_data["id"] + model.created = model_data["created"] + model.updated = model_data["updated"] + models.append(model) + return models + + def create_model(self, name: str, provider: str, model_type: str) -> Model: + """Create a new model.""" + response = api_client.create_model(name, provider, model_type) + model_data = response if isinstance(response, dict) else response[0] + model = Model( + name=model_data["name"], + provider=model_data["provider"], + type=model_data["type"], + ) + model.id = model_data["id"] + model.created = model_data["created"] + model.updated = model_data["updated"] + return model + + def delete_model(self, model_id: str) -> bool: + """Delete a model.""" + api_client.delete_model(model_id) + return True + + def get_default_models(self) -> DefaultModels: + """Get default 
model assignments.""" + response = api_client.get_default_models() + defaults_data = response if isinstance(response, dict) else response[0] + defaults = DefaultModels() + + # Set the values from API response + defaults.default_chat_model = defaults_data.get("default_chat_model") + defaults.default_transformation_model = defaults_data.get("default_transformation_model") + defaults.large_context_model = defaults_data.get("large_context_model") + defaults.default_text_to_speech_model = defaults_data.get("default_text_to_speech_model") + defaults.default_speech_to_text_model = defaults_data.get("default_speech_to_text_model") + defaults.default_embedding_model = defaults_data.get("default_embedding_model") + defaults.default_tools_model = defaults_data.get("default_tools_model") + + return defaults + + def update_default_models(self, defaults: DefaultModels) -> DefaultModels: + """Update default model assignments.""" + updates = { + "default_chat_model": defaults.default_chat_model, + "default_transformation_model": defaults.default_transformation_model, + "large_context_model": defaults.large_context_model, + "default_text_to_speech_model": defaults.default_text_to_speech_model, + "default_speech_to_text_model": defaults.default_speech_to_text_model, + "default_embedding_model": defaults.default_embedding_model, + "default_tools_model": defaults.default_tools_model, + } + + response = api_client.update_default_models(**updates) + defaults_data = response if isinstance(response, dict) else response[0] + + # Update the defaults object with the response + defaults.default_chat_model = defaults_data.get("default_chat_model") + defaults.default_transformation_model = defaults_data.get("default_transformation_model") + defaults.large_context_model = defaults_data.get("large_context_model") + defaults.default_text_to_speech_model = defaults_data.get("default_text_to_speech_model") + defaults.default_speech_to_text_model = defaults_data.get("default_speech_to_text_model") + 
defaults.default_embedding_model = defaults_data.get("default_embedding_model") + defaults.default_tools_model = defaults_data.get("default_tools_model") + + return defaults + + +# Global service instance +models_service = ModelsService() \ No newline at end of file diff --git a/api/notebook_service.py b/api/notebook_service.py new file mode 100644 index 0000000000000000000000000000000000000000..340f35e97b15a4253aaa264da063c2646e40f0a5 --- /dev/null +++ b/api/notebook_service.py @@ -0,0 +1,87 @@ +""" +Notebook service layer using API. +""" + +from typing import List, Optional + +from loguru import logger + +from api.client import api_client +from open_notebook.domain.notebook import Notebook + + +class NotebookService: + """Service layer for notebook operations using API.""" + + def __init__(self): + logger.info("Using API for notebook operations") + + def get_all_notebooks(self, order_by: str = "updated desc") -> List[Notebook]: + """Get all notebooks.""" + notebooks_data = api_client.get_notebooks(order_by=order_by) + # Convert API response to Notebook objects + notebooks = [] + for nb_data in notebooks_data: + nb = Notebook( + name=nb_data["name"], + description=nb_data["description"], + archived=nb_data["archived"], + ) + nb.id = nb_data["id"] + nb.created = nb_data["created"] + nb.updated = nb_data["updated"] + notebooks.append(nb) + return notebooks + + def get_notebook(self, notebook_id: str) -> Optional[Notebook]: + """Get a specific notebook.""" + response = api_client.get_notebook(notebook_id) + nb_data = response if isinstance(response, dict) else response[0] + nb = Notebook( + name=nb_data["name"], + description=nb_data["description"], + archived=nb_data["archived"], + ) + nb.id = nb_data["id"] + nb.created = nb_data["created"] + nb.updated = nb_data["updated"] + return nb + + def create_notebook(self, name: str, description: str = "") -> Notebook: + """Create a new notebook.""" + response = api_client.create_notebook(name, description) + nb_data = 
response if isinstance(response, dict) else response[0] + nb = Notebook( + name=nb_data["name"], + description=nb_data["description"], + archived=nb_data["archived"], + ) + nb.id = nb_data["id"] + nb.created = nb_data["created"] + nb.updated = nb_data["updated"] + return nb + + def update_notebook(self, notebook: Notebook) -> Notebook: + """Update a notebook.""" + updates = { + "name": notebook.name, + "description": notebook.description, + "archived": notebook.archived, + } + response = api_client.update_notebook(notebook.id or "", **updates) + nb_data = response if isinstance(response, dict) else response[0] + # Update the notebook object with the response + notebook.name = nb_data["name"] + notebook.description = nb_data["description"] + notebook.archived = nb_data["archived"] + notebook.updated = nb_data["updated"] + return notebook + + def delete_notebook(self, notebook: Notebook) -> bool: + """Delete a notebook.""" + api_client.delete_notebook(notebook.id or "") + return True + + +# Global service instance +notebook_service = NotebookService() \ No newline at end of file diff --git a/api/notes_service.py b/api/notes_service.py new file mode 100644 index 0000000000000000000000000000000000000000..d47a37be21b81ebd27e016a9ccaa7367e93a82b4 --- /dev/null +++ b/api/notes_service.py @@ -0,0 +1,100 @@ +""" +Notes service layer using API. 
+""" + +from typing import List, Optional + +from loguru import logger + +from api.client import api_client +from open_notebook.domain.notebook import Note + + +class NotesService: + """Service layer for notes operations using API.""" + + def __init__(self): + logger.info("Using API for notes operations") + + def get_all_notes(self, notebook_id: Optional[str] = None) -> List[Note]: + """Get all notes with optional notebook filtering.""" + notes_data = api_client.get_notes(notebook_id=notebook_id) + # Convert API response to Note objects + notes = [] + for note_data in notes_data: + note = Note( + title=note_data["title"], + content=note_data["content"], + note_type=note_data["note_type"], + ) + note.id = note_data["id"] + note.created = note_data["created"] + note.updated = note_data["updated"] + notes.append(note) + return notes + + def get_note(self, note_id: str) -> Note: + """Get a specific note.""" + note_response = api_client.get_note(note_id) + note_data = note_response if isinstance(note_response, dict) else note_response[0] + note = Note( + title=note_data["title"], + content=note_data["content"], + note_type=note_data["note_type"], + ) + note.id = note_data["id"] + note.created = note_data["created"] + note.updated = note_data["updated"] + return note + + def create_note( + self, + content: str, + title: Optional[str] = None, + note_type: str = "human", + notebook_id: Optional[str] = None + ) -> Note: + """Create a new note.""" + note_response = api_client.create_note( + content=content, + title=title, + note_type=note_type, + notebook_id=notebook_id + ) + note_data = note_response if isinstance(note_response, dict) else note_response[0] + note = Note( + title=note_data["title"], + content=note_data["content"], + note_type=note_data["note_type"], + ) + note.id = note_data["id"] + note.created = note_data["created"] + note.updated = note_data["updated"] + return note + + def update_note(self, note: Note) -> Note: + """Update a note.""" + updates = { + 
"title": note.title, + "content": note.content, + "note_type": note.note_type, + } + note_response = api_client.update_note(note.id or "", **updates) + note_data = note_response if isinstance(note_response, dict) else note_response[0] + + # Update the note object with the response + note.title = note_data["title"] + note.content = note_data["content"] + note.note_type = note_data["note_type"] + note.updated = note_data["updated"] + + return note + + def delete_note(self, note_id: str) -> bool: + """Delete a note.""" + api_client.delete_note(note_id) + return True + + +# Global service instance +notes_service = NotesService() \ No newline at end of file diff --git a/api/podcast_api_service.py b/api/podcast_api_service.py new file mode 100644 index 0000000000000000000000000000000000000000..edf4d847e3bf2354086f109a21a04ef86eda3638 --- /dev/null +++ b/api/podcast_api_service.py @@ -0,0 +1,125 @@ +""" +Podcast service layer using API client. +This replaces direct httpx calls in the Streamlit pages. 
+""" + +from typing import Any, Dict, List + +from loguru import logger + +from api.client import api_client + + +class PodcastAPIService: + """Service layer for podcast operations using API client.""" + + def __init__(self): + logger.info("Using API client for podcast operations") + + # Episode methods + def get_episodes(self) -> List[Dict[Any, Any]]: + """Get all podcast episodes.""" + result = api_client._make_request("GET", "/api/podcasts/episodes") + return result if isinstance(result, list) else [result] + + def delete_episode(self, episode_id: str) -> bool: + """Delete a podcast episode.""" + try: + api_client._make_request("DELETE", f"/api/podcasts/episodes/{episode_id}") + return True + except Exception as e: + logger.error(f"Failed to delete episode: {e}") + return False + + # Episode Profile methods + def get_episode_profiles(self) -> List[Dict]: + """Get all episode profiles.""" + return api_client.get_episode_profiles() + + def create_episode_profile(self, profile_data: Dict) -> bool: + """Create a new episode profile.""" + try: + api_client.create_episode_profile(**profile_data) + return True + except Exception as e: + logger.error(f"Failed to create episode profile: {e}") + return False + + def update_episode_profile(self, profile_id: str, profile_data: Dict) -> bool: + """Update an episode profile.""" + try: + api_client.update_episode_profile(profile_id, **profile_data) + return True + except Exception as e: + logger.error(f"Failed to update episode profile: {e}") + return False + + def delete_episode_profile(self, profile_id: str) -> bool: + """Delete an episode profile.""" + try: + api_client.delete_episode_profile(profile_id) + return True + except Exception as e: + logger.error(f"Failed to delete episode profile: {e}") + return False + + def duplicate_episode_profile(self, profile_id: str) -> bool: + """Duplicate an episode profile.""" + try: + api_client._make_request( + "POST", f"/api/episode-profiles/{profile_id}/duplicate" + ) + return True 
+ except Exception as e: + logger.error(f"Failed to duplicate episode profile: {e}") + return False + + # Speaker Profile methods + def get_speaker_profiles(self) -> List[Dict[Any, Any]]: + """Get all speaker profiles.""" + result = api_client._make_request("GET", "/api/speaker-profiles") + return result if isinstance(result, list) else [result] + + def create_speaker_profile(self, profile_data: Dict) -> bool: + """Create a new speaker profile.""" + try: + api_client._make_request("POST", "/api/speaker-profiles", json=profile_data) + return True + except Exception as e: + logger.error(f"Failed to create speaker profile: {e}") + return False + + def update_speaker_profile(self, profile_id: str, profile_data: Dict) -> bool: + """Update a speaker profile.""" + try: + api_client._make_request( + "PUT", f"/api/speaker-profiles/{profile_id}", json=profile_data + ) + return True + except Exception as e: + logger.error(f"Failed to update speaker profile: {e}") + return False + + def delete_speaker_profile(self, profile_id: str) -> bool: + """Delete a speaker profile.""" + try: + api_client._make_request("DELETE", f"/api/speaker-profiles/{profile_id}") + return True + except Exception as e: + logger.error(f"Failed to delete speaker profile: {e}") + return False + + def duplicate_speaker_profile(self, profile_id: str) -> bool: + """Duplicate a speaker profile.""" + try: + api_client._make_request( + "POST", f"/api/speaker-profiles/{profile_id}/duplicate" + ) + return True + except Exception as e: + logger.error(f"Failed to duplicate speaker profile: {e}") + return False + + +# Global service instance +podcast_api_service = PodcastAPIService() diff --git a/api/podcast_service.py b/api/podcast_service.py new file mode 100644 index 0000000000000000000000000000000000000000..8bee41ef31a88cb07bb359310ddcc2dcbbfdfe53 --- /dev/null +++ b/api/podcast_service.py @@ -0,0 +1,206 @@ +from typing import Any, Dict, Optional + +from fastapi import HTTPException +from loguru import logger 
+from pydantic import BaseModel +from surreal_commands import get_command_status, submit_command + +from open_notebook.domain.notebook import Notebook +from open_notebook.domain.podcast import EpisodeProfile, PodcastEpisode, SpeakerProfile + + +class PodcastGenerationRequest(BaseModel): + """Request model for podcast generation""" + + episode_profile: str + speaker_profile: str + episode_name: str + content: Optional[str] = None + notebook_id: Optional[str] = None + briefing_suffix: Optional[str] = None + + +class PodcastGenerationResponse(BaseModel): + """Response model for podcast generation""" + + job_id: str + status: str + message: str + episode_profile: str + episode_name: str + + +class PodcastService: + """Service layer for podcast operations""" + + @staticmethod + async def submit_generation_job( + episode_profile_name: str, + speaker_profile_name: str, + episode_name: str, + notebook_id: Optional[str] = None, + content: Optional[str] = None, + briefing_suffix: Optional[str] = None, + ) -> str: + """Submit a podcast generation job for background processing""" + try: + # Validate episode profile exists + episode_profile = await EpisodeProfile.get_by_name(episode_profile_name) + if not episode_profile: + raise ValueError(f"Episode profile '{episode_profile_name}' not found") + + # Validate speaker profile exists + speaker_profile = await SpeakerProfile.get_by_name(speaker_profile_name) + if not speaker_profile: + raise ValueError(f"Speaker profile '{speaker_profile_name}' not found") + + # Get content from notebook if not provided directly + if not content and notebook_id: + try: + notebook = await Notebook.get(notebook_id) + # Get notebook context (this may need to be adjusted based on actual Notebook implementation) + content = ( + await notebook.get_context() + if hasattr(notebook, "get_context") + else str(notebook) + ) + except Exception as e: + logger.warning( + f"Failed to get notebook content, using notebook_id as content: {e}" + ) + content = 
f"Notebook ID: {notebook_id}" + + if not content: + raise ValueError( + "Content is required - provide either content or notebook_id" + ) + + # Prepare command arguments + command_args = { + "episode_profile": episode_profile_name, + "speaker_profile": speaker_profile_name, + "episode_name": episode_name, + "content": str(content), + "briefing_suffix": briefing_suffix, + } + + # Ensure command modules are imported before submitting + # This is needed because submit_command validates against local registry + try: + import commands.podcast_commands # noqa: F401 + except ImportError as import_err: + logger.error(f"Failed to import podcast commands: {import_err}") + raise ValueError("Podcast commands not available") + + # Submit command to surreal-commands + job_id = submit_command("open_notebook", "generate_podcast", command_args) + + # Convert RecordID to string if needed + if not job_id: + raise ValueError("Failed to get job_id from submit_command") + job_id_str = str(job_id) + logger.info( + f"Submitted podcast generation job: {job_id_str} for episode '{episode_name}'" + ) + return job_id_str + + except Exception as e: + logger.error(f"Failed to submit podcast generation job: {e}") + raise HTTPException( + status_code=500, + detail=f"Failed to submit podcast generation job: {str(e)}", + ) + + @staticmethod + async def get_job_status(job_id: str) -> Dict[str, Any]: + """Get status of a podcast generation job""" + try: + status = await get_command_status(job_id) + return { + "job_id": job_id, + "status": status.status if status else "unknown", + "result": status.result if status else None, + "error_message": getattr(status, "error_message", None) + if status + else None, + "created": str(status.created) + if status and hasattr(status, "created") and status.created + else None, + "updated": str(status.updated) + if status and hasattr(status, "updated") and status.updated + else None, + "progress": getattr(status, "progress", None) if status else None, + } + except 
Exception as e: + logger.error(f"Failed to get podcast job status: {e}") + raise HTTPException( + status_code=500, detail=f"Failed to get job status: {str(e)}" + ) + + @staticmethod + async def list_episodes() -> list: + """List all podcast episodes""" + try: + episodes = await PodcastEpisode.get_all(order_by="created desc") + return episodes + except Exception as e: + logger.error(f"Failed to list podcast episodes: {e}") + raise HTTPException( + status_code=500, detail=f"Failed to list episodes: {str(e)}" + ) + + @staticmethod + async def get_episode(episode_id: str) -> PodcastEpisode: + """Get a specific podcast episode""" + try: + episode = await PodcastEpisode.get(episode_id) + return episode + except Exception as e: + logger.error(f"Failed to get podcast episode {episode_id}: {e}") + raise HTTPException(status_code=404, detail=f"Episode not found: {str(e)}") + + +class DefaultProfiles: + """Utility class for creating default profiles (if needed beyond migration data)""" + + @staticmethod + async def create_default_episode_profiles(): + """Create default episode profiles if they don't exist""" + try: + # Check if profiles already exist + existing = await EpisodeProfile.get_all() + if existing: + logger.info(f"Episode profiles already exist: {len(existing)} found") + return existing + + # This would create profiles, but since we have migration data, + # this is mainly for future extensibility + logger.info( + "Default episode profiles should be created via database migration" + ) + return [] + + except Exception as e: + logger.error(f"Failed to create default episode profiles: {e}") + raise + + @staticmethod + async def create_default_speaker_profiles(): + """Create default speaker profiles if they don't exist""" + try: + # Check if profiles already exist + existing = await SpeakerProfile.get_all() + if existing: + logger.info(f"Speaker profiles already exist: {len(existing)} found") + return existing + + # This would create profiles, but since we have migration 
data, + # this is mainly for future extensibility + logger.info( + "Default speaker profiles should be created via database migration" + ) + return [] + + except Exception as e: + logger.error(f"Failed to create default speaker profiles: {e}") + raise diff --git a/api/routers/__init__.py b/api/routers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/api/routers/auth.py b/api/routers/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..5c35c38942b35e13293bc7ae3e9a4968a18e49e7 --- /dev/null +++ b/api/routers/auth.py @@ -0,0 +1,24 @@ +""" +Authentication router for Open Notebook API. +Provides endpoints to check authentication status. +""" + +import os + +from fastapi import APIRouter + +router = APIRouter(prefix="/auth", tags=["auth"]) + + +@router.get("/status") +async def get_auth_status(): + """ + Check if authentication is enabled. + Returns whether a password is required to access the API. 
+ """ + auth_enabled = bool(os.environ.get("OPEN_NOTEBOOK_PASSWORD")) + + return { + "auth_enabled": auth_enabled, + "message": "Authentication is required" if auth_enabled else "Authentication is disabled" + } diff --git a/api/routers/chat.py b/api/routers/chat.py new file mode 100644 index 0000000000000000000000000000000000000000..61e1468ba921b57b0ab7f1771fee8d434b8035a3 --- /dev/null +++ b/api/routers/chat.py @@ -0,0 +1,493 @@ +import asyncio +from typing import Any, Dict, List, Optional + +from fastapi import APIRouter, HTTPException, Query +from langchain_core.runnables import RunnableConfig +from loguru import logger +from pydantic import BaseModel, Field + +from open_notebook.database.repository import ensure_record_id, repo_query +from open_notebook.domain.notebook import ChatSession, Note, Notebook, Source +from open_notebook.exceptions import ( + NotFoundError, +) +from open_notebook.graphs.chat import graph as chat_graph + +router = APIRouter() + +# Request/Response models +class CreateSessionRequest(BaseModel): + notebook_id: str = Field(..., description="Notebook ID to create session for") + title: Optional[str] = Field(None, description="Optional session title") + model_override: Optional[str] = Field( + None, description="Optional model override for this session" + ) + + +class UpdateSessionRequest(BaseModel): + title: Optional[str] = Field(None, description="New session title") + model_override: Optional[str] = Field( + None, description="Model override for this session" + ) + + +class ChatMessage(BaseModel): + id: str = Field(..., description="Message ID") + type: str = Field(..., description="Message type (human|ai)") + content: str = Field(..., description="Message content") + timestamp: Optional[str] = Field(None, description="Message timestamp") + + +class ChatSessionResponse(BaseModel): + id: str = Field(..., description="Session ID") + title: str = Field(..., description="Session title") + notebook_id: Optional[str] = Field(None, 
description="Notebook ID") + created: str = Field(..., description="Creation timestamp") + updated: str = Field(..., description="Last update timestamp") + message_count: Optional[int] = Field( + None, description="Number of messages in session" + ) + model_override: Optional[str] = Field( + None, description="Model override for this session" + ) + + +class ChatSessionWithMessagesResponse(ChatSessionResponse): + messages: List[ChatMessage] = Field( + default_factory=list, description="Session messages" + ) + + +class ExecuteChatRequest(BaseModel): + session_id: str = Field(..., description="Chat session ID") + message: str = Field(..., description="User message content") + context: Dict[str, Any] = Field( + ..., description="Chat context with sources and notes" + ) + model_override: Optional[str] = Field( + None, description="Optional model override for this message" + ) + + +class ExecuteChatResponse(BaseModel): + session_id: str = Field(..., description="Session ID") + messages: List[ChatMessage] = Field(..., description="Updated message list") + + +class BuildContextRequest(BaseModel): + notebook_id: str = Field(..., description="Notebook ID") + context_config: Dict[str, Any] = Field(..., description="Context configuration") + + +class BuildContextResponse(BaseModel): + context: Dict[str, Any] = Field(..., description="Built context data") + token_count: int = Field(..., description="Estimated token count") + char_count: int = Field(..., description="Character count") + + +class SuccessResponse(BaseModel): + success: bool = Field(True, description="Operation success status") + message: str = Field(..., description="Success message") + + +@router.get("/chat/sessions", response_model=List[ChatSessionResponse]) +async def get_sessions(notebook_id: str = Query(..., description="Notebook ID")): + """Get all chat sessions for a notebook.""" + try: + # Get notebook to verify it exists + notebook = await Notebook.get(notebook_id) + if not notebook: + raise 
HTTPException(status_code=404, detail="Notebook not found") + + # Get sessions for this notebook + sessions = await notebook.get_chat_sessions() + + return [ + ChatSessionResponse( + id=session.id or "", + title=session.title or "Untitled Session", + notebook_id=notebook_id, + created=str(session.created), + updated=str(session.updated), + message_count=0, # TODO: Add message count if needed + model_override=getattr(session, "model_override", None), + ) + for session in sessions + ] + except NotFoundError: + raise HTTPException(status_code=404, detail="Notebook not found") + except Exception as e: + logger.error(f"Error fetching chat sessions: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error fetching chat sessions: {str(e)}" + ) + + +@router.post("/chat/sessions", response_model=ChatSessionResponse) +async def create_session(request: CreateSessionRequest): + """Create a new chat session.""" + try: + # Verify notebook exists + notebook = await Notebook.get(request.notebook_id) + if not notebook: + raise HTTPException(status_code=404, detail="Notebook not found") + + # Create new session + session = ChatSession( + title=request.title or f"Chat Session {asyncio.get_event_loop().time():.0f}", + model_override=request.model_override, + ) + await session.save() + + # Relate session to notebook + await session.relate_to_notebook(request.notebook_id) + + return ChatSessionResponse( + id=session.id or "", + title=session.title or "", + notebook_id=request.notebook_id, + created=str(session.created), + updated=str(session.updated), + message_count=0, + model_override=session.model_override, + ) + except NotFoundError: + raise HTTPException(status_code=404, detail="Notebook not found") + except Exception as e: + logger.error(f"Error creating chat session: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error creating chat session: {str(e)}" + ) + + +@router.get( + "/chat/sessions/{session_id}", response_model=ChatSessionWithMessagesResponse +) 
@router.get(
    "/chat/sessions/{session_id}", response_model=ChatSessionWithMessagesResponse
)
async def get_session(session_id: str):
    """Get a specific session with its messages.

    Accepts either a bare id or a full "chat_session:<id>" record id.

    Raises:
        HTTPException 404: session does not exist.
        HTTPException 500: any other failure.
    """
    try:
        # Normalize ONCE: ensure the id carries the table prefix.
        # (The original computed this identical expression twice.)
        full_session_id = (
            session_id
            if session_id.startswith("chat_session:")
            else f"chat_session:{session_id}"
        )
        session = await ChatSession.get(full_session_id)
        if not session:
            raise HTTPException(status_code=404, detail="Session not found")

        # LangGraph checkpoints are keyed by the raw session_id as received.
        thread_state = chat_graph.get_state(
            config=RunnableConfig(configurable={"thread_id": session_id})
        )

        # Extract messages from the checkpointed graph state
        messages: list[ChatMessage] = []
        if thread_state and thread_state.values and "messages" in thread_state.values:
            for msg in thread_state.values["messages"]:
                messages.append(
                    ChatMessage(
                        id=getattr(msg, "id", f"msg_{len(messages)}"),
                        type=msg.type if hasattr(msg, "type") else "unknown",
                        content=msg.content if hasattr(msg, "content") else str(msg),
                        timestamp=None,  # LangChain messages don't have timestamps by default
                    )
                )

        # Resolve the owning notebook via the refers_to edge (reuse the
        # already-normalized record id).
        notebook_query = await repo_query(
            "SELECT out FROM refers_to WHERE in = $session_id",
            {"session_id": ensure_record_id(full_session_id)},
        )
        notebook_id = notebook_query[0]["out"] if notebook_query else None

        if not notebook_id:
            # This might be an old session created before API migration
            logger.warning(
                f"No notebook relationship found for session {session_id} - may be an orphaned session"
            )

        return ChatSessionWithMessagesResponse(
            id=session.id or "",
            title=session.title or "Untitled Session",
            notebook_id=notebook_id,
            created=str(session.created),
            updated=str(session.updated),
            message_count=len(messages),
            messages=messages,
            model_override=getattr(session, "model_override", None),
        )
    except HTTPException:
        # Bug fix: let the 404 propagate instead of being converted to a 500
        # by the generic handler below.
        raise
    except NotFoundError:
        raise HTTPException(status_code=404, detail="Session not found")
    except Exception as e:
        logger.error(f"Error fetching session: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error fetching session: {str(e)}")


@router.put("/chat/sessions/{session_id}", response_model=ChatSessionResponse)
async def update_session(session_id: str, request: UpdateSessionRequest):
    """Update session title and/or model override.

    Raises:
        HTTPException 404: session does not exist.
        HTTPException 500: any other failure.
    """
    try:
        # Normalize ONCE (original duplicated this computation).
        full_session_id = (
            session_id
            if session_id.startswith("chat_session:")
            else f"chat_session:{session_id}"
        )
        session = await ChatSession.get(full_session_id)
        if not session:
            raise HTTPException(status_code=404, detail="Session not found")

        # Only fields explicitly sent by the client are applied
        update_data = request.model_dump(exclude_unset=True)

        if "title" in update_data:
            session.title = update_data["title"]

        if "model_override" in update_data:
            session.model_override = update_data["model_override"]

        await session.save()

        # Resolve the owning notebook via the refers_to edge
        notebook_query = await repo_query(
            "SELECT out FROM refers_to WHERE in = $session_id",
            {"session_id": ensure_record_id(full_session_id)},
        )
        notebook_id = notebook_query[0]["out"] if notebook_query else None

        return ChatSessionResponse(
            id=session.id or "",
            title=session.title or "",
            notebook_id=notebook_id,
            created=str(session.created),
            updated=str(session.updated),
            message_count=0,
            model_override=session.model_override,
        )
    except HTTPException:
        # Bug fix: preserve the 404 instead of re-raising as 500.
        raise
    except NotFoundError:
        raise HTTPException(status_code=404, detail="Session not found")
    except Exception as e:
        logger.error(f"Error updating session: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error updating session: {str(e)}")
@router.delete("/chat/sessions/{session_id}", response_model=SuccessResponse)
async def delete_session(session_id: str):
    """Delete a chat session.

    Raises:
        HTTPException 404: session does not exist.
        HTTPException 500: any other failure.
    """
    try:
        # Ensure session_id has proper table prefix
        full_session_id = (
            session_id
            if session_id.startswith("chat_session:")
            else f"chat_session:{session_id}"
        )
        session = await ChatSession.get(full_session_id)
        if not session:
            raise HTTPException(status_code=404, detail="Session not found")

        await session.delete()

        return SuccessResponse(success=True, message="Session deleted successfully")
    except HTTPException:
        # Bug fix: without this, the 404 above was re-raised as a 500.
        raise
    except NotFoundError:
        raise HTTPException(status_code=404, detail="Session not found")
    except Exception as e:
        logger.error(f"Error deleting session: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error deleting session: {str(e)}")


@router.post("/chat/execute", response_model=ExecuteChatResponse)
async def execute_chat(request: ExecuteChatRequest):
    """Execute a chat request and get AI response.

    Per-request model_override takes precedence over the session-level one.

    Raises:
        HTTPException 404: session does not exist.
        HTTPException 500: any other failure.
    """
    # Hoisted to the top of the function (was buried mid-body).
    from langchain_core.messages import HumanMessage

    try:
        # Ensure session_id has proper table prefix
        full_session_id = (
            request.session_id
            if request.session_id.startswith("chat_session:")
            else f"chat_session:{request.session_id}"
        )
        session = await ChatSession.get(full_session_id)
        if not session:
            raise HTTPException(status_code=404, detail="Session not found")

        # Determine model override (per-request override takes precedence)
        model_override = (
            request.model_override
            if request.model_override is not None
            else getattr(session, "model_override", None)
        )

        # Current LangGraph checkpoint is keyed by the raw session_id
        current_state = chat_graph.get_state(
            config=RunnableConfig(configurable={"thread_id": request.session_id})
        )

        # Prepare state for execution
        state_values = current_state.values if current_state else {}
        state_values["messages"] = state_values.get("messages", [])
        state_values["context"] = request.context
        state_values["model_override"] = model_override

        # Append the incoming user message before invoking the graph
        state_values["messages"].append(HumanMessage(content=request.message))

        result = chat_graph.invoke(
            input=state_values,  # type: ignore[arg-type]
            config=RunnableConfig(
                configurable={
                    "thread_id": request.session_id,
                    "model_id": model_override,
                }
            ),
        )

        # Re-save so the session's "updated" timestamp reflects this exchange
        await session.save()

        # Convert graph messages to the API response shape
        messages: list[ChatMessage] = []
        for msg in result.get("messages", []):
            messages.append(
                ChatMessage(
                    id=getattr(msg, "id", f"msg_{len(messages)}"),
                    type=msg.type if hasattr(msg, "type") else "unknown",
                    content=msg.content if hasattr(msg, "content") else str(msg),
                    timestamp=None,
                )
            )

        return ExecuteChatResponse(session_id=request.session_id, messages=messages)
    except HTTPException:
        # Bug fix: preserve the 404 instead of converting it to a 500.
        raise
    except NotFoundError:
        raise HTTPException(status_code=404, detail="Session not found")
    except Exception as e:
        logger.error(f"Error executing chat: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error executing chat: {str(e)}")


@router.post("/chat/context", response_model=BuildContextResponse)
async def build_context(request: BuildContextRequest):
    """Build context for a notebook based on context configuration.

    When no context_config is supplied, all sources and notes are included
    with short context. Individual item failures are logged and skipped.
    """
    try:
        # Verify notebook exists
        notebook = await Notebook.get(request.notebook_id)
        if not notebook:
            raise HTTPException(status_code=404, detail="Notebook not found")

        context_data: dict[str, list[dict[str, str]]] = {"sources": [], "notes": []}
        total_content = ""

        if request.context_config:
            # Explicit per-item configuration path
            for source_id, status in request.context_config.get("sources", {}).items():
                if "not in" in status:
                    continue  # explicitly excluded
                try:
                    # Add table prefix if not present
                    full_source_id = (
                        source_id
                        if source_id.startswith("source:")
                        else f"source:{source_id}"
                    )
                    try:
                        source = await Source.get(full_source_id)
                    except Exception:
                        continue  # missing source is skipped, not fatal

                    if "insights" in status:
                        source_context = await source.get_context(context_size="short")
                        context_data["sources"].append(source_context)
                        total_content += str(source_context)
                    elif "full content" in status:
                        source_context = await source.get_context(context_size="long")
                        context_data["sources"].append(source_context)
                        total_content += str(source_context)
                except Exception as e:
                    logger.warning(f"Error processing source {source_id}: {str(e)}")
                    continue

            for note_id, status in request.context_config.get("notes", {}).items():
                if "not in" in status:
                    continue
                try:
                    full_note_id = (
                        note_id if note_id.startswith("note:") else f"note:{note_id}"
                    )
                    note = await Note.get(full_note_id)
                    if not note:
                        continue

                    if "full content" in status:
                        # NOTE(review): Note.get_context is called without await
                        # here (Source.get_context is awaited) — presumably sync;
                        # confirm against the domain model.
                        note_context = note.get_context(context_size="long")
                        context_data["notes"].append(note_context)
                        total_content += str(note_context)
                except Exception as e:
                    logger.warning(f"Error processing note {note_id}: {str(e)}")
                    continue
        else:
            # Default behavior - include all sources and notes with short context
            sources = await notebook.get_sources()
            for source in sources:
                try:
                    source_context = await source.get_context(context_size="short")
                    context_data["sources"].append(source_context)
                    total_content += str(source_context)
                except Exception as e:
                    logger.warning(f"Error processing source {source.id}: {str(e)}")
                    continue

            notes = await notebook.get_notes()
            for note in notes:
                try:
                    note_context = note.get_context(context_size="short")
                    context_data["notes"].append(note_context)
                    total_content += str(note_context)
                except Exception as e:
                    logger.warning(f"Error processing note {note.id}: {str(e)}")
                    continue

        # Character count is exact; token count is best-effort
        char_count = len(total_content)
        try:
            from open_notebook.utils import token_count

            estimated_tokens = token_count(total_content) if total_content else 0
        except ImportError:
            # Fallback to simple estimation (~4 chars per token)
            estimated_tokens = char_count // 4

        return BuildContextResponse(
            context=context_data, token_count=estimated_tokens, char_count=char_count
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error building context: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error building context: {str(e)}")


# --- api/routers/commands.py (new module introduced by this patch) ---

from typing import Any, Dict, List, Optional

from fastapi import APIRouter, HTTPException, Query
from loguru import logger
from pydantic import BaseModel, Field
from surreal_commands import registry

from api.command_service import CommandService

router = APIRouter()


class CommandExecutionRequest(BaseModel):
    # Payload for submitting a background command job
    command: str = Field(..., description="Command function name (e.g., 'process_text')")
    app: str = Field(..., description="Application name (e.g., 'open_notebook')")
    input: Dict[str, Any] = Field(..., description="Arguments to pass to the command")


class CommandJobResponse(BaseModel):
    # Acknowledgement returned immediately after submission
    job_id: str
    status: str
    message: str


class CommandJobStatusResponse(BaseModel):
    # Polled status of a previously submitted job
    job_id: str
    status: str
    result: Optional[Dict[str, Any]] = None
    error_message: Optional[str] = None
    created: Optional[str] = None
    updated: Optional[str] = None
    progress: Optional[Dict[str, Any]] = None


@router.post("/commands/jobs", response_model=CommandJobResponse)
async def execute_command(request: CommandExecutionRequest):
    """
    Submit a command for background processing.
    Returns immediately with job ID for status tracking.

    Example request:
    {
        "command": "process_text",
        "app": "open_notebook",
        "input": {"text": "Hello world", "operation": "uppercase"}
    }
    """
    try:
        # Submit command using app name (not module name)
        job_id = await CommandService.submit_command_job(
            module_name=request.app,  # This should be "open_notebook"
            command_name=request.command,
            command_args=request.input,
        )

        return CommandJobResponse(
            job_id=job_id,
            status="submitted",
            message=f"Command '{request.command}' submitted successfully",
        )
    except Exception as e:
        logger.error(f"Error submitting command: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail=f"Failed to submit command: {str(e)}",
        )
@router.get("/commands/jobs/{job_id}", response_model=CommandJobStatusResponse)
async def get_command_job_status(job_id: str):
    """Return the current status of a single command job."""
    try:
        payload = await CommandService.get_command_status(job_id)
        return CommandJobStatusResponse(**payload)
    except Exception as e:
        logger.error(f"Error fetching job status: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail=f"Failed to fetch job status: {str(e)}",
        )


@router.get("/commands/jobs", response_model=List[Dict[str, Any]])
async def list_command_jobs(
    command_filter: Optional[str] = Query(None, description="Filter by command name"),
    status_filter: Optional[str] = Query(None, description="Filter by status"),
    limit: int = Query(50, description="Maximum number of jobs to return"),
):
    """Return command jobs, optionally filtered by command name and/or status."""
    try:
        return await CommandService.list_command_jobs(
            command_filter=command_filter,
            status_filter=status_filter,
            limit=limit,
        )
    except Exception as e:
        logger.error(f"Error listing command jobs: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail=f"Failed to list command jobs: {str(e)}",
        )


@router.delete("/commands/jobs/{job_id}")
async def cancel_command_job(job_id: str):
    """Cancel a running command job"""
    try:
        cancelled = await CommandService.cancel_command_job(job_id)
        return {"job_id": job_id, "cancelled": cancelled}
    except Exception as e:
        logger.error(f"Error cancelling command job: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail=f"Failed to cancel command job: {str(e)}",
        )


@router.get("/commands/registry/debug")
async def debug_registry():
    """Debug endpoint to see what commands are registered"""
    try:
        registered = registry.get_all_commands()

        # JSON-serializable view of each registry entry; a bad entry is
        # logged and skipped rather than failing the whole endpoint.
        serializable = []
        for entry in registered:
            try:
                serializable.append(
                    {
                        "app_id": entry.app_id,
                        "name": entry.name,
                        "full_id": f"{entry.app_id}.{entry.name}",
                    }
                )
            except Exception as item_error:
                logger.error(f"Error processing item: {item_error}")

        # Group command names by app; fall back to an empty mapping on error
        # (same best-effort semantics as before).
        try:
            by_app: dict[str, list[str]] = {}
            for entry in registered:
                by_app.setdefault(entry.app_id, []).append(entry.name)
        except Exception:
            by_app = {}

        return {
            "total_commands": len(registered),
            "commands_by_app": by_app,
            "command_items": serializable,
        }
    except Exception as e:
        logger.error(f"Error debugging registry: {str(e)}")
        # Never 500 here — this is a diagnostics endpoint
        return {
            "error": str(e),
            "total_commands": 0,
            "commands_by_app": {},
            "command_items": [],
        }
router = APIRouter()

# In-memory cache for version check results.
# NOTE(review): per-process only — with multiple workers each process keeps
# its own cache; acceptable for a 24h TTL.
_version_cache: dict = {
    "latest_version": None,
    "has_update": False,
    "timestamp": 0,
    "check_failed": False,
}

# Cache TTL in seconds (24 hours)
VERSION_CACHE_TTL = 24 * 60 * 60


def get_version() -> str:
    """Read version from pyproject.toml; returns "unknown" on any failure."""
    try:
        # config.py lives at api/routers/, so pyproject.toml is three levels up
        pyproject_path = Path(__file__).parent.parent.parent / "pyproject.toml"
        with open(pyproject_path, "rb") as f:
            pyproject = tomllib.load(f)
        return pyproject.get("project", {}).get("version", "unknown")
    except Exception as e:
        logger.warning(f"Could not read version from pyproject.toml: {e}")
        return "unknown"


def get_latest_version_cached(current_version: str) -> tuple[Optional[str], bool]:
    """
    Check for the latest version from GitHub with caching.

    Returns:
        tuple: (latest_version, has_update)
            - latest_version: str or None if check failed
            - has_update: bool indicating if update is available

    Failures are cached too, so a down GitHub does not cause repeated
    network attempts within the TTL.
    """
    # Idiom fix: the original declared `global _version_cache`, but the dict
    # is only mutated (never rebound), so `global` is unnecessary.

    # Serve from cache while it is within the TTL
    cache_age = time.time() - _version_cache["timestamp"]
    if _version_cache["timestamp"] > 0 and cache_age < VERSION_CACHE_TTL:
        logger.debug(f"Using cached version check result (age: {cache_age:.0f}s)")
        return _version_cache["latest_version"], _version_cache["has_update"]

    if _version_cache["timestamp"] > 0:
        logger.info(f"Version cache expired (age: {cache_age:.0f}s), refreshing...")

    try:
        logger.info("Checking for latest version from GitHub...")

        # Fetch latest version from GitHub.
        # NOTE(review): the original comment claimed a 10-second timeout, but
        # no timeout is visible at this call site — confirm it is enforced
        # inside get_version_from_github.
        latest_version = get_version_from_github(
            "https://github.com/lfnovo/open-notebook",
            "main",
        )

        logger.info(f"Latest version from GitHub: {latest_version}, Current version: {current_version}")

        # has_update is true when current < latest
        has_update = compare_versions(current_version, latest_version) < 0

        _version_cache["latest_version"] = latest_version
        _version_cache["has_update"] = has_update
        _version_cache["timestamp"] = time.time()
        _version_cache["check_failed"] = False

        logger.info(f"Version check complete. Update available: {has_update}")

        return latest_version, has_update

    except Exception as e:
        logger.warning(f"Version check failed: {e}")

        # Cache the failure to avoid repeated attempts
        _version_cache["latest_version"] = None
        _version_cache["has_update"] = False
        _version_cache["timestamp"] = time.time()
        _version_cache["check_failed"] = True

        return None, False


async def check_database_health() -> dict:
    """
    Check if database is reachable using a lightweight query.

    Returns:
        dict with 'status' ("online" | "offline") and optional 'error'
    """
    try:
        # 2-second timeout keeps the config endpoint responsive when DB is down
        result = await asyncio.wait_for(repo_query("RETURN 1"), timeout=2.0)
        if result:
            return {"status": "online"}
        return {"status": "offline", "error": "Empty result"}
    except asyncio.TimeoutError:
        logger.warning("Database health check timed out after 2 seconds")
        return {"status": "offline", "error": "Health check timeout"}
    except Exception as e:
        logger.warning(f"Database health check failed: {e}")
        return {"status": "offline", "error": str(e)}


@router.get("/config")
async def get_config(request: Request):
    """
    Get frontend configuration.

    Returns version information and health status.
    Note: The frontend determines the API URL via its own runtime-config endpoint,
    so this endpoint no longer returns apiUrl.
    """
    current_version = get_version()

    # Version check MUST NOT break the endpoint — extra try/except as a belt
    latest_version = None
    has_update = False
    try:
        latest_version, has_update = get_latest_version_cached(current_version)
    except Exception as e:
        logger.error(f"Unexpected error during version check: {e}")

    db_health = await check_database_health()
    db_status = db_health["status"]

    if db_status == "offline":
        logger.warning(f"Database offline: {db_health.get('error', 'Unknown error')}")

    return {
        "version": current_version,
        "latestVersion": latest_version,
        "hasUpdate": has_update,
        "dbStatus": db_status,
    }


# --- api/routers/context.py (new module introduced by this patch) ---

@router.post("/notebooks/{notebook_id}/context", response_model=ContextResponse)
async def get_notebook_context(notebook_id: str, context_request: ContextRequest):
    """Get context for a notebook based on configuration.

    Mirrors chats.build_context but keyed as {"note": [], "source": []} and
    driven by a typed ContextRequest. Item-level failures are skipped.
    """
    try:
        notebook = await Notebook.get(notebook_id)
        if not notebook:
            raise HTTPException(status_code=404, detail="Notebook not found")

        context_data: dict[str, list[dict[str, str]]] = {"note": [], "source": []}
        total_content = ""

        if context_request.context_config:
            # Explicit per-item configuration
            for source_id, status in context_request.context_config.sources.items():
                if "not in" in status:
                    continue
                try:
                    full_source_id = (
                        source_id
                        if source_id.startswith("source:")
                        else f"source:{source_id}"
                    )
                    try:
                        source = await Source.get(full_source_id)
                    except Exception:
                        continue

                    if "insights" in status:
                        source_context = await source.get_context(context_size="short")
                        context_data["source"].append(source_context)
                        total_content += str(source_context)
                    elif "full content" in status:
                        source_context = await source.get_context(context_size="long")
                        context_data["source"].append(source_context)
                        total_content += str(source_context)
                except Exception as e:
                    logger.warning(f"Error processing source {source_id}: {str(e)}")
                    continue

            for note_id, status in context_request.context_config.notes.items():
                if "not in" in status:
                    continue
                try:
                    full_note_id = (
                        note_id if note_id.startswith("note:") else f"note:{note_id}"
                    )
                    note = await Note.get(full_note_id)
                    if not note:
                        continue

                    if "full content" in status:
                        note_context = note.get_context(context_size="long")
                        context_data["note"].append(note_context)
                        total_content += str(note_context)
                except Exception as e:
                    logger.warning(f"Error processing note {note_id}: {str(e)}")
                    continue
        else:
            # Default behavior - include all sources and notes with short context
            sources = await notebook.get_sources()
            for source in sources:
                try:
                    source_context = await source.get_context(context_size="short")
                    context_data["source"].append(source_context)
                    total_content += str(source_context)
                except Exception as e:
                    logger.warning(f"Error processing source {source.id}: {str(e)}")
                    continue

            notes = await notebook.get_notes()
            for note in notes:
                try:
                    note_context = note.get_context(context_size="short")
                    context_data["note"].append(note_context)
                    total_content += str(note_context)
                except Exception as e:
                    logger.warning(f"Error processing note {note.id}: {str(e)}")
                    continue

        estimated_tokens = token_count(total_content) if total_content else 0

        return ContextResponse(
            notebook_id=notebook_id,
            sources=context_data["source"],
            notes=context_data["note"],
            total_tokens=estimated_tokens,
        )

    except HTTPException:
        raise
    except InvalidInputError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(f"Error getting context for notebook {notebook_id}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error getting context: {str(e)}")
# --- api/routers/diagrams.py (new module introduced by this patch) ---

router = APIRouter(prefix="/diagrams", tags=["diagrams"])


class GenerateDiagramRequest(BaseModel):
    # Inputs for diagram generation; model_id is an optional override
    # (service default is used when None).
    query: str = Field(..., description="Description of the diagram to generate")
    context: str = Field(..., description="Context content to base the diagram on")
    model_id: Optional[str] = None


class DiagramResponse(BaseModel):
    # Mermaid.js payload returned to the client.
    code: str = Field(..., description="Mermaid.js code")
    type: str = Field(..., description="Type of diagram (flowchart, sequence, etc.)")


@router.post("/generate", response_model=DiagramResponse)
async def generate_diagram(request: GenerateDiagramRequest):
    """Generate a diagram from text"""
    try:
        generated = await diagram_service.generate_diagram(
            request.query,
            request.context,
            request.model_id,
        )
        return DiagramResponse(**generated)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.post("/embed", response_model=EmbedResponse)
async def embed_content(embed_request: EmbedRequest):
    """Embed content for vector search.

    Two paths: async_processing submits a background command; otherwise the
    item is handled directly (sources still submit a vectorization job).
    """
    try:
        # An embedding model must be configured before anything else
        if not await model_manager.get_embedding_model():
            raise HTTPException(
                status_code=400,
                detail="No embedding model configured. Please configure one in the Models section.",
            )

        target_id = embed_request.item_id
        target_type = embed_request.item_type.lower()

        if target_type not in ["source", "note"]:
            raise HTTPException(
                status_code=400, detail="Item type must be either 'source' or 'note'"
            )

        if embed_request.async_processing:
            # ASYNC PATH: hand the work to the background command queue
            logger.info(f"Using async processing for {target_type} {target_id}")

            try:
                # Import commands to ensure they're registered
                import commands.embedding_commands  # noqa: F401

                queued_command_id = await CommandService.submit_command_job(
                    "open_notebook",  # app name
                    "embed_single_item",  # command name
                    {"item_id": target_id, "item_type": target_type},
                )

                logger.info(f"Submitted async embedding command: {queued_command_id}")

                return EmbedResponse(
                    success=True,
                    message="Embedding queued for background processing",
                    item_id=target_id,
                    item_type=target_type,
                    command_id=queued_command_id,
                )

            except Exception as e:
                logger.error(f"Failed to submit async embedding command: {e}")
                raise HTTPException(
                    status_code=500, detail=f"Failed to queue embedding: {str(e)}"
                )

        # SYNC PATH: "sync" means "submit and return command_id" — actual
        # processing still happens asynchronously in the worker pool.
        logger.info(f"Using sync processing for {target_type} {target_id}")

        queued_command_id = None

        if target_type == "source":
            source_item = await Source.get(target_id)
            if not source_item:
                raise HTTPException(status_code=404, detail="Source not found")

            # Submit vectorization job (returns command_id for tracking)
            queued_command_id = await source_item.vectorize()
            outcome = "Source vectorization job submitted"
        else:
            note_item = await Note.get(target_id)
            if not note_item:
                raise HTTPException(status_code=404, detail="Note not found")

            await note_item.save()  # Auto-embeds via ObjectModel.save()
            outcome = "Note embedded successfully"

        return EmbedResponse(
            success=True,
            message=outcome,
            item_id=target_id,
            item_type=target_type,
            command_id=queued_command_id,
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            f"Error embedding {embed_request.item_type} {embed_request.item_id}: {str(e)}"
        )
        raise HTTPException(
            status_code=500, detail=f"Error embedding content: {str(e)}"
        )
@router.post("/rebuild", response_model=RebuildResponse)
async def start_rebuild(request: RebuildRequest):
    """
    Start a background job to rebuild embeddings.

    - **mode**: "existing" (re-embed items with embeddings) or "all" (embed everything)
    - **include_sources**: Include sources in rebuild (default: true)
    - **include_notes**: Include notes in rebuild (default: true)
    - **include_insights**: Include insights in rebuild (default: true)

    Returns command ID to track progress and estimated item count.
    """

    def _extract_count(result) -> int:
        # repo_query may return [{"count": n}] or a bare scalar row — normalize.
        # (This logic was triplicated verbatim below; extracted for clarity.)
        if result and isinstance(result[0], dict):
            return result[0].get("count", 0)
        if result:
            return result[0] if isinstance(result[0], int) else 0
        return 0

    try:
        logger.info(f"Starting rebuild request: mode={request.mode}")

        # Import commands to ensure they're registered
        import commands.embedding_commands  # noqa: F401

        # Rough pre-run estimate of total items to process
        total_estimate = 0

        if request.include_sources:
            if request.mode == "existing":
                # Count distinct sources that already have embeddings.
                # FIXME(review): the trailing "FROM {}" looks suspicious —
                # verify this SurrealDB query targets the intended expression.
                result = await repo_query(
                    """
                    SELECT VALUE count(array::distinct(
                        SELECT VALUE source.id
                        FROM source_embedding
                        WHERE embedding != none AND array::len(embedding) > 0
                    )) as count FROM {}
                    """
                )
            else:
                # Count all sources with content
                result = await repo_query(
                    "SELECT VALUE count() as count FROM source WHERE full_text != none GROUP ALL"
                )
            total_estimate += _extract_count(result)

        if request.include_notes:
            if request.mode == "existing":
                result = await repo_query(
                    "SELECT VALUE count() as count FROM note WHERE embedding != none AND array::len(embedding) > 0 GROUP ALL"
                )
            else:
                result = await repo_query(
                    "SELECT VALUE count() as count FROM note WHERE content != none GROUP ALL"
                )
            total_estimate += _extract_count(result)

        if request.include_insights:
            if request.mode == "existing":
                result = await repo_query(
                    "SELECT VALUE count() as count FROM source_insight WHERE embedding != none AND array::len(embedding) > 0 GROUP ALL"
                )
            else:
                result = await repo_query(
                    "SELECT VALUE count() as count FROM source_insight GROUP ALL"
                )
            total_estimate += _extract_count(result)

        logger.info(f"Estimated {total_estimate} items to process")

        command_id = await CommandService.submit_command_job(
            "open_notebook",
            "rebuild_embeddings",
            {
                "mode": request.mode,
                "include_sources": request.include_sources,
                "include_notes": request.include_notes,
                "include_insights": request.include_insights,
            },
        )

        logger.info(f"Submitted rebuild command: {command_id}")

        return RebuildResponse(
            command_id=command_id,
            total_items=total_estimate,
            message=f"Rebuild operation started. Estimated {total_estimate} items to process.",
        )

    except Exception as e:
        logger.error(f"Failed to start rebuild: {e}")
        logger.exception(e)
        raise HTTPException(
            status_code=500, detail=f"Failed to start rebuild operation: {str(e)}"
        )


@router.get("/rebuild/{command_id}/status", response_model=RebuildStatusResponse)
async def get_rebuild_status(command_id: str):
    """
    Get the status of a rebuild operation.

    Returns:
        - **status**: queued, running, completed, failed
        - **progress**: processed count, total count, percentage
        - **stats**: breakdown by type (sources, notes, insights, failed)
        - **timestamps**: started_at, completed_at
    """
    try:
        status = await get_command_status(command_id)

        if not status:
            raise HTTPException(status_code=404, detail="Rebuild command not found")

        response = RebuildStatusResponse(
            command_id=command_id,
            status=status.status,
        )

        # Progress/stats live inside the command's result payload
        if status.result and isinstance(status.result, dict):
            result = status.result

            if "total_items" in result and "processed_items" in result:
                total = result["total_items"]
                processed = result["processed_items"]
                response.progress = RebuildProgress(
                    processed=processed,
                    total=total,
                    # Guard against division by zero on an empty run
                    percentage=round((processed / total * 100) if total > 0 else 0, 2),
                )

            response.stats = RebuildStats(
                sources=result.get("sources_processed", 0),
                notes=result.get("notes_processed", 0),
                insights=result.get("insights_processed", 0),
                failed=result.get("failed_items", 0),
            )

        # Timestamps come from the command record itself
        if hasattr(status, "created") and status.created:
            response.started_at = str(status.created)
        if hasattr(status, "updated") and status.updated:
            response.completed_at = str(status.updated)

        if status.status == "failed" and status.result and isinstance(status.result, dict):
            response.error_message = status.result.get(
                "error_message", "Unknown error"
            )

        return response

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to get rebuild status: {e}")
        logger.exception(e)
        raise HTTPException(
            status_code=500, detail=f"Failed to get rebuild status: {str(e)}"
        )
# ---- api/routers/episode_profiles.py ----------------------------------------

from typing import List

from fastapi import APIRouter, HTTPException
from loguru import logger
from pydantic import BaseModel, Field

from open_notebook.domain.podcast import EpisodeProfile

router = APIRouter()


class EpisodeProfileResponse(BaseModel):
    """API representation of an episode profile."""

    id: str
    name: str
    description: str
    speaker_config: str
    outline_provider: str
    outline_model: str
    transcript_provider: str
    transcript_model: str
    default_briefing: str
    num_segments: int


def _to_response(profile: EpisodeProfile) -> EpisodeProfileResponse:
    """Map a domain EpisodeProfile onto the API response model.

    Centralizes the field-by-field copy that every endpoint needs
    (previously duplicated five times across this router).
    """
    return EpisodeProfileResponse(
        id=str(profile.id),
        name=profile.name,
        description=profile.description or "",
        speaker_config=profile.speaker_config,
        outline_provider=profile.outline_provider,
        outline_model=profile.outline_model,
        transcript_provider=profile.transcript_provider,
        transcript_model=profile.transcript_model,
        default_briefing=profile.default_briefing,
        num_segments=profile.num_segments,
    )


@router.get("/episode-profiles", response_model=List[EpisodeProfileResponse])
async def list_episode_profiles():
    """List all available episode profiles"""
    try:
        profiles = await EpisodeProfile.get_all(order_by="name asc")
        return [_to_response(profile) for profile in profiles]
    except Exception as e:
        logger.error(f"Failed to fetch episode profiles: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"Failed to fetch episode profiles: {str(e)}"
        )


@router.get("/episode-profiles/{profile_name}", response_model=EpisodeProfileResponse)
async def get_episode_profile(profile_name: str):
    """Get a specific episode profile by name"""
    try:
        profile = await EpisodeProfile.get_by_name(profile_name)

        if not profile:
            raise HTTPException(
                status_code=404,
                detail=f"Episode profile '{profile_name}' not found"
            )

        return _to_response(profile)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to fetch episode profile '{profile_name}': {e}")
        raise HTTPException(
            status_code=500,
            detail=f"Failed to fetch episode profile: {str(e)}"
        )


class EpisodeProfileCreate(BaseModel):
    """Payload for creating (or fully updating) an episode profile."""

    name: str = Field(..., description="Unique profile name")
    description: str = Field("", description="Profile description")
    speaker_config: str = Field(..., description="Reference to speaker profile name")
    outline_provider: str = Field(..., description="AI provider for outline generation")
    outline_model: str = Field(..., description="AI model for outline generation")
    transcript_provider: str = Field(..., description="AI provider for transcript generation")
    transcript_model: str = Field(..., description="AI model for transcript generation")
    default_briefing: str = Field(..., description="Default briefing template")
    num_segments: int = Field(default=5, description="Number of podcast segments")


@router.post("/episode-profiles", response_model=EpisodeProfileResponse)
async def create_episode_profile(profile_data: EpisodeProfileCreate):
    """Create a new episode profile"""
    try:
        profile = EpisodeProfile(
            name=profile_data.name,
            description=profile_data.description,
            speaker_config=profile_data.speaker_config,
            outline_provider=profile_data.outline_provider,
            outline_model=profile_data.outline_model,
            transcript_provider=profile_data.transcript_provider,
            transcript_model=profile_data.transcript_model,
            default_briefing=profile_data.default_briefing,
            num_segments=profile_data.num_segments,
        )

        await profile.save()

        return _to_response(profile)

    except Exception as e:
        logger.error(f"Failed to create episode profile: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"Failed to create episode profile: {str(e)}"
        )


@router.put("/episode-profiles/{profile_id}", response_model=EpisodeProfileResponse)
async def update_episode_profile(profile_id: str, profile_data: EpisodeProfileCreate):
    """Update an existing episode profile"""
    try:
        profile = await EpisodeProfile.get(profile_id)

        if not profile:
            raise HTTPException(
                status_code=404,
                detail=f"Episode profile '{profile_id}' not found"
            )

        # Full replace: overwrite every editable field with submitted values.
        profile.name = profile_data.name
        profile.description = profile_data.description
        profile.speaker_config = profile_data.speaker_config
        profile.outline_provider = profile_data.outline_provider
        profile.outline_model = profile_data.outline_model
        profile.transcript_provider = profile_data.transcript_provider
        profile.transcript_model = profile_data.transcript_model
        profile.default_briefing = profile_data.default_briefing
        profile.num_segments = profile_data.num_segments

        await profile.save()

        return _to_response(profile)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to update episode profile: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"Failed to update episode profile: {str(e)}"
        )


@router.delete("/episode-profiles/{profile_id}")
async def delete_episode_profile(profile_id: str):
    """Delete an episode profile"""
    try:
        profile = await EpisodeProfile.get(profile_id)

        if not profile:
            raise HTTPException(
                status_code=404,
                detail=f"Episode profile '{profile_id}' not found"
            )

        await profile.delete()

        return {"message": "Episode profile deleted successfully"}

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to delete episode profile: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete episode profile: {str(e)}"
        )


@router.post("/episode-profiles/{profile_id}/duplicate", response_model=EpisodeProfileResponse)
async def duplicate_episode_profile(profile_id: str):
    """Duplicate an episode profile"""
    try:
        original = await EpisodeProfile.get(profile_id)

        if not original:
            raise HTTPException(
                status_code=404,
                detail=f"Episode profile '{profile_id}' not found"
            )

        # Create duplicate with modified name
        duplicate = EpisodeProfile(
            name=f"{original.name} - Copy",
            description=original.description,
            speaker_config=original.speaker_config,
            outline_provider=original.outline_provider,
            outline_model=original.outline_model,
            transcript_provider=original.transcript_provider,
            transcript_model=original.transcript_model,
            default_briefing=original.default_briefing,
            num_segments=original.num_segments,
        )

        await duplicate.save()

        return _to_response(duplicate)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to duplicate episode profile: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"Failed to duplicate episode profile: {str(e)}"
        )


# ---- api/routers/insights.py ------------------------------------------------

from fastapi import APIRouter, HTTPException
from loguru import logger

from api.models import NoteResponse, SaveAsNoteRequest, SourceInsightResponse
from open_notebook.domain.notebook import SourceInsight
from open_notebook.exceptions import InvalidInputError

router = APIRouter()


@router.get("/insights/{insight_id}", response_model=SourceInsightResponse)
async def get_insight(insight_id: str):
    """Get a specific insight by ID."""
    try:
        insight = await SourceInsight.get(insight_id)
        if not insight:
            raise HTTPException(status_code=404, detail="Insight not found")

        # Get source ID from the insight relationship
        source = await insight.get_source()

        return SourceInsightResponse(
            id=insight.id or "",
            source_id=source.id or "",
            insight_type=insight.insight_type,
            content=insight.content,
            created=str(insight.created),
            updated=str(insight.updated),
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error fetching insight {insight_id}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error fetching insight: {str(e)}")


@router.delete("/insights/{insight_id}")
async def delete_insight(insight_id: str):
    """Delete a specific insight."""
    try:
        insight = await SourceInsight.get(insight_id)
        if not insight:
            raise HTTPException(status_code=404, detail="Insight not found")

        await insight.delete()

        return {"message": "Insight deleted successfully"}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error deleting insight {insight_id}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error deleting insight: {str(e)}")


@router.post("/insights/{insight_id}/save-as-note", response_model=NoteResponse)
async def save_insight_as_note(insight_id: str, request: SaveAsNoteRequest):
    """Convert an insight to a note."""
    try:
        insight = await SourceInsight.get(insight_id)
        if not insight:
            raise HTTPException(status_code=404, detail="Insight not found")

        # Use the existing save_as_note method from the domain model
        note = await insight.save_as_note(request.notebook_id)

        return NoteResponse(
            id=note.id or "",
            title=note.title,
            content=note.content,
            note_type=note.note_type,
            created=str(note.created),
            updated=str(note.updated),
        )
    except HTTPException:
        raise
    except InvalidInputError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(f"Error saving insight {insight_id} as note: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error saving insight as note: {str(e)}")


# ---- api/routers/knowledge_graph.py -----------------------------------------

"""
Knowledge Graph API Router

Endpoints for building and querying knowledge graphs.
"""

from typing import List, Optional
from datetime import datetime

from fastapi import APIRouter, HTTPException, BackgroundTasks
from pydantic import BaseModel, Field
from loguru import logger

from open_notebook.domain.knowledge_graph import (
    KnowledgeGraph,
    ConceptNode,
    ConceptEdge,
    KnowledgeGraphMeta
)
from open_notebook.services.knowledge_graph_service import knowledge_graph_service
from open_notebook.database.repository import repo


router = APIRouter(prefix="/knowledge-graph", tags=["knowledge-graph"])


# ============================================================================
# Request/Response Models
# ============================================================================

class BuildGraphRequest(BaseModel):
    """Request to build a knowledge graph"""
    notebook_id: str
    model_id: Optional[str] = None


class GraphNode(BaseModel):
    """A node in the graph visualization format"""
    id: str
    label: str
    type: str
    description: Optional[str] = None
    importance: float = 0.5
    mentions: int = 1
    val: float = 5  # Size for visualization
    color: str = "#3b82f6"


class GraphLink(BaseModel):
    """A link/edge in the graph visualization format"""
    source: str
    target: str
    relationship: str
    weight: float = 1.0


class GraphData(BaseModel):
    """Complete graph data for visualization"""
    nodes: List[GraphNode]
    links: List[GraphLink]


class GraphMetaResponse(BaseModel):
    """Metadata about a knowledge graph"""
    notebook_id: str
    node_count: int
    edge_count: int
    last_built: Optional[datetime]
    build_status: str
    error_message: Optional[str] = None


class NodeDetailResponse(BaseModel):
    """Detailed information about a concept node"""
    node: dict
    connections: List[dict]
    edges: List[dict]


# ============================================================================
# Endpoints
# ============================================================================
@router.post("/build", response_model=GraphMetaResponse)
async def build_knowledge_graph(request: BuildGraphRequest, background_tasks: BackgroundTasks):
    """
    Build a knowledge graph for a notebook.
    This starts a background task and returns immediately with status.
    """
    notebook_id = request.notebook_id

    # Check if notebook exists
    notebook_result = await repo.get(notebook_id)
    if not notebook_result:
        raise HTTPException(status_code=404, detail="Notebook not found")

    # Get or create metadata
    meta = await KnowledgeGraphMeta.get_for_notebook(notebook_id)
    if not meta:
        meta = KnowledgeGraphMeta(notebook_id=notebook_id)
        await meta.save()

    # Schedule the build; FastAPI runs background tasks after the
    # response has been sent.
    background_tasks.add_task(
        _build_graph_task,
        notebook_id,
        request.model_id
    )

    meta.build_status = "building"
    await meta.save()

    logger.info(f"Started knowledge graph build for notebook {notebook_id}")

    return GraphMetaResponse(
        notebook_id=notebook_id,
        node_count=meta.node_count,
        edge_count=meta.edge_count,
        last_built=meta.last_built,
        build_status="building",
        error_message=None
    )


async def _build_graph_task(notebook_id: str, model_id: Optional[str]):
    """Background task to build the knowledge graph.

    Fetches the notebook's sources, delegates graph construction to the
    knowledge-graph service, and records success/failure on the metadata
    record so clients polling /status see the outcome.
    """
    meta = None
    try:
        logger.info(f"Starting knowledge graph build task for notebook {notebook_id}")

        # Get sources for the notebook via reference table - include both full_text and insights
        # Sources are linked to notebooks via the 'reference' edge table (source -> notebook)
        # Use type::thing() to convert the string parameter to a record ID for proper matching
        query = """
        SELECT
            in.id AS id,
            in.title AS title,
            in.full_text AS full_text,
            (SELECT array::group(content) FROM insight WHERE insight.source = in.id) AS insights
        FROM reference
        WHERE out = type::thing($notebook_id)
        AND (in.full_text IS NOT NONE OR in.id IN (SELECT source FROM insight))
        """
        logger.info(f"Executing KG query for notebook {notebook_id}")
        sources = await repo.query(query, {"notebook_id": notebook_id})

        logger.info(f"KG Query result: Found {len(sources) if sources else 0} sources")
        if sources:
            for s in sources:
                has_text = bool(s.get('full_text'))
                insights_len = len(s.get('insights', []) or [])
                logger.info(f"Source {s.get('id')}: full_text={has_text}, insights={insights_len}")

        if not sources:
            # Nothing to build: mark the graph as completed-but-empty so the
            # UI stops polling, and record why.
            logger.warning(f"No sources with content or insights found for notebook {notebook_id}")
            meta = await KnowledgeGraphMeta.get_for_notebook(notebook_id)
            if meta:
                meta.build_status = "completed"
                meta.node_count = 0
                meta.edge_count = 0
                meta.last_built = datetime.now()
                meta.error_message = "No sources with text content found"
                await meta.save()
            return

        # Build the graph. The service is responsible for persisting the
        # resulting nodes/edges and metadata, so the return value was
        # previously bound to an unused local; drop the binding.
        await knowledge_graph_service.build_knowledge_graph(
            notebook_id,
            sources,
            model_id
        )

        logger.info(f"Knowledge graph build completed for notebook {notebook_id}")

    except Exception as e:
        logger.error(f"Knowledge graph build failed for {notebook_id}: {e}", exc_info=True)
        # Update metadata with error
        if not meta:
            meta = await KnowledgeGraphMeta.get_for_notebook(notebook_id)
        if meta:
            meta.build_status = "error"
            meta.error_message = str(e)
            await meta.save()


@router.get("/status/{notebook_id}", response_model=GraphMetaResponse)
async def get_graph_status(notebook_id: str):
    """Get the build status of a notebook's knowledge graph"""
    meta = await KnowledgeGraphMeta.get_for_notebook(notebook_id)

    # No metadata record means no build has ever been started.
    if not meta:
        return GraphMetaResponse(
            notebook_id=notebook_id,
            node_count=0,
            edge_count=0,
            last_built=None,
            build_status="not_built",
            error_message=None
        )

    return GraphMetaResponse(
        notebook_id=notebook_id,
        node_count=meta.node_count,
        edge_count=meta.edge_count,
        last_built=meta.last_built,
        build_status=meta.build_status,
        error_message=meta.error_message
    )


@router.get("/{notebook_id}", response_model=GraphData)
async def get_knowledge_graph(notebook_id: str):
    """Get the complete knowledge graph for a notebook"""
    graph = await KnowledgeGraph.load(notebook_id)

    if not graph.nodes:
        return GraphData(nodes=[], links=[])

    graph_data = graph.to_graph_data()

    return GraphData(
        nodes=[GraphNode(**n) for n in graph_data["nodes"]],
        links=[GraphLink(**l) for l in graph_data["links"]]
    )


@router.get("/node/{node_id}", response_model=NodeDetailResponse)
async def get_node_details(node_id: str):
    """Get detailed information about a specific concept node"""
    details = await knowledge_graph_service.get_node_details(node_id)

    if not details:
        raise HTTPException(status_code=404, detail="Node not found")

    return NodeDetailResponse(**details)


@router.get("/nodes/{notebook_id}", response_model=List[dict])
async def get_nodes(notebook_id: str, type: Optional[str] = None):
    """Get all nodes for a notebook, optionally filtered by type"""
    nodes = await ConceptNode.find_by_notebook(notebook_id)

    if type:
        nodes = [n for n in nodes if n.type == type]

    return [n.model_dump() for n in nodes]


@router.delete("/{notebook_id}")
async def delete_knowledge_graph(notebook_id: str):
    """Delete a notebook's knowledge graph (nodes, edges, and metadata)."""
    # Delete nodes
    nodes = await ConceptNode.find_by_notebook(notebook_id)
    for node in nodes:
        await node.delete()

    # Delete edges
    edges = await ConceptEdge.find_by_notebook(notebook_id)
    for edge in edges:
        await edge.delete()

    # Delete metadata
    meta = await KnowledgeGraphMeta.get_for_notebook(notebook_id)
    if meta and meta.id:
        await repo.delete(meta.id)

    logger.info(f"Deleted knowledge graph for notebook {notebook_id}")

    return {"status": "deleted", "notebook_id": notebook_id}


# ---- api/routers/models.py --------------------------------------------------

import os
from typing import List, Optional

from esperanto import AIFactory
from fastapi import APIRouter, HTTPException, Query
from loguru import logger

from api.models import (
    DefaultModelsResponse,
    ModelCreate,
    ModelResponse,
    ProviderAvailabilityResponse,
)
from open_notebook.domain.models import DefaultModels, Model
from open_notebook.exceptions import InvalidInputError

router = APIRouter()


def _check_openai_compatible_support(mode: str) -> bool:
    """
    Check if OpenAI-compatible provider is available for a specific mode.

    Args:
        mode: One of 'LLM', 'EMBEDDING', 'STT', 'TTS'

    Returns:
        bool: True if either generic or mode-specific env var is set
    """
    generic = os.environ.get("OPENAI_COMPATIBLE_BASE_URL") is not None
    specific = os.environ.get(f"OPENAI_COMPATIBLE_BASE_URL_{mode}") is not None
    return generic or specific


def _check_azure_support(mode: str) -> bool:
    """
    Check if Azure OpenAI provider is available for a specific mode.

    Args:
        mode: One of 'LLM', 'EMBEDDING', 'STT', 'TTS'

    Returns:
        bool: True if either generic or mode-specific env vars are set
    """
    # Check generic configuration (applies to all modes)
    generic = (
        os.environ.get("AZURE_OPENAI_API_KEY") is not None
        and os.environ.get("AZURE_OPENAI_ENDPOINT") is not None
        and os.environ.get("AZURE_OPENAI_API_VERSION") is not None
    )

    # Check mode-specific configuration (takes precedence)
    specific = (
        os.environ.get(f"AZURE_OPENAI_API_KEY_{mode}") is not None
        and os.environ.get(f"AZURE_OPENAI_ENDPOINT_{mode}") is not None
        and os.environ.get(f"AZURE_OPENAI_API_VERSION_{mode}") is not None
    )

    return generic or specific


@router.get("/models", response_model=List[ModelResponse])
async def get_models(
    type: Optional[str] = Query(None, description="Filter by model type")
):
    """Get all configured models with optional type filtering."""
    try:
        if type:
            models = await Model.get_models_by_type(type)
        else:
            models = await Model.get_all()

        return [
            ModelResponse(
                id=model.id,
                name=model.name,
                provider=model.provider,
                type=model.type,
                created=str(model.created),
                updated=str(model.updated),
            )
            for model in models
        ]
    except Exception as e:
        logger.error(f"Error fetching models: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error fetching models: {str(e)}")


@router.post("/models", response_model=ModelResponse)
async def create_model(model_data: ModelCreate):
    """Create a new model configuration."""
    try:
        # Validate model type
        valid_types = ["language", "embedding", "text_to_speech", "speech_to_text"]
        if model_data.type not in valid_types:
            raise HTTPException(
                status_code=400,
                detail=f"Invalid model type. Must be one of: {valid_types}"
            )

        # Check for duplicate model name under the same provider (case-insensitive)
        from open_notebook.database.repository import repo_query
        existing = await repo_query(
            "SELECT * FROM model WHERE string::lowercase(provider) = $provider AND string::lowercase(name) = $name LIMIT 1",
            {"provider": model_data.provider.lower(), "name": model_data.name.lower()}
        )
        if existing:
            raise HTTPException(
                status_code=400,
                detail=f"Model '{model_data.name}' already exists for provider '{model_data.provider}'"
            )

        new_model = Model(
            name=model_data.name,
            provider=model_data.provider,
            type=model_data.type,
        )
        await new_model.save()

        return ModelResponse(
            id=new_model.id or "",
            name=new_model.name,
            provider=new_model.provider,
            type=new_model.type,
            created=str(new_model.created),
            updated=str(new_model.updated),
        )
    except HTTPException:
        raise
    except InvalidInputError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(f"Error creating model: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error creating model: {str(e)}")


@router.delete("/models/{model_id}")
async def delete_model(model_id: str):
    """Delete a model configuration."""
    try:
        model = await Model.get(model_id)
        if not model:
            raise HTTPException(status_code=404, detail="Model not found")

        await model.delete()

        return {"message": "Model deleted successfully"}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error deleting model {model_id}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error deleting model: {str(e)}")


@router.get("/models/defaults", response_model=DefaultModelsResponse)
async def get_default_models():
    """Get default model assignments."""
    try:
        defaults = await DefaultModels.get_instance()

        return DefaultModelsResponse(
            default_chat_model=defaults.default_chat_model,  # type: ignore[attr-defined]
            default_transformation_model=defaults.default_transformation_model,  # type: ignore[attr-defined]
            large_context_model=defaults.large_context_model,  # type: ignore[attr-defined]
            default_text_to_speech_model=defaults.default_text_to_speech_model,  # type: ignore[attr-defined]
            default_speech_to_text_model=defaults.default_speech_to_text_model,  # type: ignore[attr-defined]
            default_embedding_model=defaults.default_embedding_model,  # type: ignore[attr-defined]
            default_tools_model=defaults.default_tools_model,  # type: ignore[attr-defined]
        )
    except Exception as e:
        logger.error(f"Error fetching default models: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error fetching default models: {str(e)}")


@router.put("/models/defaults", response_model=DefaultModelsResponse)
async def update_default_models(defaults_data: DefaultModelsResponse):
    """Update default model assignments.

    Only fields supplied (non-None) in the payload are overwritten;
    omitted fields keep their current values.
    """
    try:
        defaults = await DefaultModels.get_instance()

        # Update only provided fields
        if defaults_data.default_chat_model is not None:
            defaults.default_chat_model = defaults_data.default_chat_model  # type: ignore[attr-defined]
        if defaults_data.default_transformation_model is not None:
            defaults.default_transformation_model = defaults_data.default_transformation_model  # type: ignore[attr-defined]
        if defaults_data.large_context_model is not None:
            defaults.large_context_model = defaults_data.large_context_model  # type: ignore[attr-defined]
        if defaults_data.default_text_to_speech_model is not None:
            defaults.default_text_to_speech_model = defaults_data.default_text_to_speech_model  # type: ignore[attr-defined]
        if defaults_data.default_speech_to_text_model is not None:
            defaults.default_speech_to_text_model = defaults_data.default_speech_to_text_model  # type: ignore[attr-defined]
        if defaults_data.default_embedding_model is not None:
            defaults.default_embedding_model = defaults_data.default_embedding_model  # type: ignore[attr-defined]
        if defaults_data.default_tools_model is not None:
            defaults.default_tools_model = defaults_data.default_tools_model  # type: ignore[attr-defined]

        await defaults.update()

        # No cache refresh needed - next access will fetch fresh data from DB

        return DefaultModelsResponse(
            default_chat_model=defaults.default_chat_model,  # type: ignore[attr-defined]
            default_transformation_model=defaults.default_transformation_model,  # type: ignore[attr-defined]
            large_context_model=defaults.large_context_model,  # type: ignore[attr-defined]
            default_text_to_speech_model=defaults.default_text_to_speech_model,  # type: ignore[attr-defined]
            default_speech_to_text_model=defaults.default_speech_to_text_model,  # type: ignore[attr-defined]
            default_embedding_model=defaults.default_embedding_model,  # type: ignore[attr-defined]
            default_tools_model=defaults.default_tools_model,  # type: ignore[attr-defined]
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error updating default models: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error updating default models: {str(e)}")


@router.get("/models/providers", response_model=ProviderAvailabilityResponse)
async def get_provider_availability():
    """Get provider availability based on environment variables."""
    try:
        # Check which providers have API keys configured
        provider_status = {
            "ollama": os.environ.get("OLLAMA_API_BASE") is not None,
            "openai": os.environ.get("OPENAI_API_KEY") is not None,
            "groq": os.environ.get("GROQ_API_KEY") is not None,
            "xai": os.environ.get("XAI_API_KEY") is not None,
            "vertex": (
                os.environ.get("VERTEX_PROJECT") is not None
                and os.environ.get("VERTEX_LOCATION") is not None
                and os.environ.get("GOOGLE_APPLICATION_CREDENTIALS") is not None
            ),
            "google": (
                os.environ.get("GOOGLE_API_KEY") is not None
                or os.environ.get("GEMINI_API_KEY") is not None
            ),
            "openrouter": os.environ.get("OPENROUTER_API_KEY") is not None,
            "anthropic": os.environ.get("ANTHROPIC_API_KEY") is not None,
            "elevenlabs": os.environ.get("ELEVENLABS_API_KEY") is not None,
            "voyage": os.environ.get("VOYAGE_API_KEY") is not None,
            "azure": (
                _check_azure_support("LLM")
                or _check_azure_support("EMBEDDING")
                or _check_azure_support("STT")
                or _check_azure_support("TTS")
            ),
            "mistral": os.environ.get("MISTRAL_API_KEY") is not None,
            "deepseek": os.environ.get("DEEPSEEK_API_KEY") is not None,
            "openai-compatible": (
                _check_openai_compatible_support("LLM")
                or _check_openai_compatible_support("EMBEDDING")
                or _check_openai_compatible_support("STT")
                or _check_openai_compatible_support("TTS")
            ),
        }

        available_providers = [k for k, v in provider_status.items() if v]
        unavailable_providers = [k for k, v in provider_status.items() if not v]

        # Get supported model types from Esperanto
        esperanto_available = AIFactory.get_available_providers()

        # Map Esperanto model types to our environment variable modes
        # (hoisted out of the provider loop below -- it is loop-invariant).
        mode_mapping = {
            "language": "LLM",
            "embedding": "EMBEDDING",
            "speech_to_text": "STT",
            "text_to_speech": "TTS",
        }

        # Build supported types mapping only for available providers
        supported_types: dict[str, list[str]] = {}
        for provider in available_providers:
            supported_types[provider] = []

            # Special handling for openai-compatible to check mode-specific availability
            if provider == "openai-compatible":
                for model_type, mode in mode_mapping.items():
                    if model_type in esperanto_available and provider in esperanto_available[model_type]:
                        if _check_openai_compatible_support(mode):
                            supported_types[provider].append(model_type)
            # Special handling for azure to check mode-specific availability
            elif provider == "azure":
                for model_type, mode in mode_mapping.items():
                    if model_type in esperanto_available and provider in esperanto_available[model_type]:
                        if _check_azure_support(mode):
                            supported_types[provider].append(model_type)
            else:
                # Standard provider detection
                for model_type, providers in esperanto_available.items():
                    if provider in providers:
                        supported_types[provider].append(model_type)

        return ProviderAvailabilityResponse(
            available=available_providers,
            unavailable=unavailable_providers,
            supported_types=supported_types
        )
    except Exception as e:
        logger.error(f"Error checking provider availability: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error checking provider availability: {str(e)}")


# ---- api/routers/monitoring.py ----------------------------------------------

"""
Auto-Update Agent API Router

Endpoints for managing source monitoring and update notifications.
"""

from typing import List, Optional
from fastapi import APIRouter, HTTPException, BackgroundTasks
from pydantic import BaseModel
from loguru import logger

from open_notebook.domain.auto_update import (
    SourceMonitor,
    UpdateNotification,
    MonitorJobRun,
    SourceMonitorCreate,
    SourceMonitorUpdate,
    NotificationAction,
    MonitoringStats,
)
from open_notebook.services.auto_update_service import auto_update_service


router = APIRouter(prefix="/monitoring", tags=["monitoring"])


# ============================================================================
# Response Models
# ============================================================================

class SourceMonitorResponse(BaseModel):
    """Response model for source monitor."""
    id: Optional[str] = None
    source_id: str
    enabled: bool
    check_frequency: str
    last_checked_at: Optional[str] = None
    last_content_hash: Optional[str] = None
    consecutive_failures: int = 0


class NotificationResponse(BaseModel):
    """Response model for notification."""
    id: Optional[str] = None
    source_id: str
    source_title: str
    change_summary: str
    diff_highlights: List[str]
    old_content_preview: Optional[str] = None
    new_content_preview: Optional[str] = None
    severity: str
    is_read: bool
    is_dismissed: bool
    created_at: str


class JobRunResponse(BaseModel):
    """Response model for job run."""
    id: Optional[str] = None
    started_at: str
    completed_at: Optional[str] = None
    status: str
    sources_checked: int
    updates_found: int
    errors: List[str]


class StatsResponse(BaseModel):
    """Response model for monitoring stats."""
    total_monitors: int
    enabled_monitors: int
    unread_notifications: int
    last_job_run: Optional[str] = None
    last_job_status: Optional[str] = None


# ============================================================================
# Monitor Endpoints
# ============================================================================
+@router.post("/monitors", response_model=SourceMonitorResponse) +async def create_monitor(request: SourceMonitorCreate): + """ + Create or update monitoring for a source. + + If monitoring already exists for the source, it will be updated. + """ + try: + monitor = await auto_update_service.create_monitor( + source_id=request.source_id, + check_frequency=request.check_frequency, + enabled=request.enabled, + ) + return SourceMonitorResponse( + id=monitor.id, + source_id=monitor.source_id, + enabled=monitor.enabled, + check_frequency=monitor.check_frequency, + last_checked_at=monitor.last_checked_at.isoformat() if monitor.last_checked_at else None, + last_content_hash=monitor.last_content_hash, + consecutive_failures=monitor.consecutive_failures, + ) + except Exception as e: + logger.error(f"Failed to create monitor: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/monitors", response_model=List[SourceMonitorResponse]) +async def list_monitors(): + """Get all source monitors.""" + try: + monitors = await auto_update_service.get_all_monitors() + return [ + SourceMonitorResponse( + id=m.id, + source_id=m.source_id, + enabled=m.enabled, + check_frequency=m.check_frequency, + last_checked_at=m.last_checked_at.isoformat() if m.last_checked_at else None, + last_content_hash=m.last_content_hash, + consecutive_failures=m.consecutive_failures, + ) + for m in monitors + ] + except Exception as e: + logger.error(f"Failed to list monitors: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/monitors/{source_id}", response_model=SourceMonitorResponse) +async def get_monitor(source_id: str): + """Get monitor for a specific source.""" + monitor = await auto_update_service.get_monitor(source_id) + if not monitor: + raise HTTPException(status_code=404, detail="Monitor not found") + + return SourceMonitorResponse( + id=monitor.id, + source_id=monitor.source_id, + enabled=monitor.enabled, + 
check_frequency=monitor.check_frequency, + last_checked_at=monitor.last_checked_at.isoformat() if monitor.last_checked_at else None, + last_content_hash=monitor.last_content_hash, + consecutive_failures=monitor.consecutive_failures, + ) + + +@router.patch("/monitors/{source_id}", response_model=SourceMonitorResponse) +async def update_monitor(source_id: str, request: SourceMonitorUpdate): + """Update monitoring settings for a source.""" + monitor = await auto_update_service.update_monitor( + source_id=source_id, + check_frequency=request.check_frequency, + enabled=request.enabled, + ) + if not monitor: + raise HTTPException(status_code=404, detail="Monitor not found") + + return SourceMonitorResponse( + id=monitor.id, + source_id=monitor.source_id, + enabled=monitor.enabled, + check_frequency=monitor.check_frequency, + last_checked_at=monitor.last_checked_at.isoformat() if monitor.last_checked_at else None, + last_content_hash=monitor.last_content_hash, + consecutive_failures=monitor.consecutive_failures, + ) + + +@router.delete("/monitors/{source_id}") +async def delete_monitor(source_id: str): + """Delete monitoring for a source.""" + success = await auto_update_service.delete_monitor(source_id) + if not success: + raise HTTPException(status_code=404, detail="Monitor not found") + return {"status": "deleted"} + + +# ============================================================================ +# Notification Endpoints +# ============================================================================ + +@router.get("/notifications", response_model=List[NotificationResponse]) +async def list_notifications( + include_dismissed: bool = False, + limit: int = 100, +): + """Get notifications.""" + try: + notifications = await auto_update_service.get_notifications( + include_dismissed=include_dismissed, + limit=limit, + ) + return [ + NotificationResponse( + id=n.id, + source_id=n.source_id, + source_title=n.source_title, + change_summary=n.change_summary, + 
diff_highlights=n.diff_highlights, + old_content_preview=n.old_content_preview, + new_content_preview=n.new_content_preview, + severity=n.severity, + is_read=n.is_read, + is_dismissed=n.is_dismissed, + created_at=n.created_at.isoformat(), + ) + for n in notifications + ] + except Exception as e: + logger.error(f"Failed to list notifications: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/notifications/unread", response_model=List[NotificationResponse]) +async def get_unread_notifications(limit: int = 50): + """Get unread notifications.""" + try: + notifications = await auto_update_service.get_unread_notifications(limit=limit) + return [ + NotificationResponse( + id=n.id, + source_id=n.source_id, + source_title=n.source_title, + change_summary=n.change_summary, + diff_highlights=n.diff_highlights, + old_content_preview=n.old_content_preview, + new_content_preview=n.new_content_preview, + severity=n.severity, + is_read=n.is_read, + is_dismissed=n.is_dismissed, + created_at=n.created_at.isoformat(), + ) + for n in notifications + ] + except Exception as e: + logger.error(f"Failed to get unread notifications: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/notifications/count") +async def get_notification_count(): + """Get count of unread notifications.""" + try: + count = auto_update_service.get_unread_count() + return {"unread_count": count} + except Exception as e: + logger.error(f"Failed to get notification count: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post("/notifications/{notification_id}/read") +async def mark_notification_read(notification_id: str): + """Mark a notification as read.""" + success = await auto_update_service.mark_notification_read(notification_id) + if not success: + raise HTTPException(status_code=404, detail="Notification not found") + return {"status": "marked_read"} + + +@router.post("/notifications/{notification_id}/dismiss") +async def 
dismiss_notification(notification_id: str): + """Dismiss a notification.""" + success = await auto_update_service.dismiss_notification(notification_id) + if not success: + raise HTTPException(status_code=404, detail="Notification not found") + return {"status": "dismissed"} + + +@router.post("/notifications/mark-all-read") +async def mark_all_notifications_read(): + """Mark all notifications as read.""" + try: + count = await auto_update_service.mark_all_read() + return {"status": "success", "count": count} + except Exception as e: + logger.error(f"Failed to mark all read: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +# ============================================================================ +# Job Endpoints +# ============================================================================ + +@router.post("/jobs/run") +async def trigger_check_job( + background_tasks: BackgroundTasks, + frequency: Optional[str] = None, +): + """ + Trigger a monitoring job to check sources for updates. + + The job runs in the background. Use /jobs/history to check status. 
+ """ + try: + # Check if already running + running = await MonitorJobRun.get_running() + if running: + return { + "status": "already_running", + "job_id": running.id, + "started_at": running.started_at.isoformat(), + } + + # Run in background + background_tasks.add_task( + auto_update_service.run_check_job, + frequency + ) + + return {"status": "started", "message": "Job started in background"} + except Exception as e: + logger.error(f"Failed to trigger job: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/jobs/history", response_model=List[JobRunResponse]) +async def get_job_history(limit: int = 10): + """Get history of monitoring jobs.""" + try: + jobs = await MonitorJobRun.get_latest(limit) + return [ + JobRunResponse( + id=j.id, + started_at=j.started_at.isoformat(), + completed_at=j.completed_at.isoformat() if j.completed_at else None, + status=j.status, + sources_checked=j.sources_checked, + updates_found=j.updates_found, + errors=j.errors, + ) + for j in jobs + ] + except Exception as e: + logger.error(f"Failed to get job history: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/jobs/current") +async def get_current_job(): + """Get currently running job if any.""" + job = await MonitorJobRun.get_running() + if not job: + return {"status": "no_job_running"} + + return JobRunResponse( + id=job.id, + started_at=job.started_at.isoformat(), + completed_at=job.completed_at.isoformat() if job.completed_at else None, + status=job.status, + sources_checked=job.sources_checked, + updates_found=job.updates_found, + errors=job.errors, + ) + + +# ============================================================================ +# Stats Endpoint +# ============================================================================ + +@router.get("/stats", response_model=StatsResponse) +async def get_monitoring_stats(): + """Get monitoring statistics.""" + try: + stats = await auto_update_service.get_stats() + return 
StatsResponse( + total_monitors=stats.total_monitors, + enabled_monitors=stats.enabled_monitors, + unread_notifications=stats.unread_notifications, + last_job_run=stats.last_job_run.isoformat() if stats.last_job_run else None, + last_job_status=stats.last_job_status, + ) + except Exception as e: + logger.error(f"Failed to get stats: {e}") + raise HTTPException(status_code=500, detail=str(e)) diff --git a/api/routers/notebooks.py b/api/routers/notebooks.py new file mode 100644 index 0000000000000000000000000000000000000000..563ce5e5c6af1e83c7ea6bea36d151f76b056487 --- /dev/null +++ b/api/routers/notebooks.py @@ -0,0 +1,275 @@ +from typing import List, Optional + +from fastapi import APIRouter, HTTPException, Query +from loguru import logger + +from api.models import NotebookCreate, NotebookResponse, NotebookUpdate +from open_notebook.database.repository import ensure_record_id, repo_query +from open_notebook.domain.notebook import Notebook, Source +from open_notebook.exceptions import InvalidInputError + +router = APIRouter() + + +@router.get("/notebooks", response_model=List[NotebookResponse]) +async def get_notebooks( + archived: Optional[bool] = Query(None, description="Filter by archived status"), + order_by: str = Query("updated desc", description="Order by field and direction"), +): + """Get all notebooks with optional filtering and ordering.""" + try: + # Build the query with counts + query = f""" + SELECT *, + count(<-reference.in) as source_count, + count(<-artifact.in) as note_count + FROM notebook + ORDER BY {order_by} + """ + + result = await repo_query(query) + + # Filter by archived status if specified + if archived is not None: + result = [nb for nb in result if nb.get("archived") == archived] + + return [ + NotebookResponse( + id=str(nb.get("id", "")), + name=nb.get("name", ""), + description=nb.get("description", ""), + archived=nb.get("archived", False), + created=str(nb.get("created", "")), + updated=str(nb.get("updated", "")), + 
source_count=nb.get("source_count", 0), + note_count=nb.get("note_count", 0), + ) + for nb in result + ] + except Exception as e: + logger.error(f"Error fetching notebooks: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error fetching notebooks: {str(e)}" + ) + + +@router.post("/notebooks", response_model=NotebookResponse) +async def create_notebook(notebook: NotebookCreate): + """Create a new notebook.""" + try: + new_notebook = Notebook( + name=notebook.name, + description=notebook.description, + ) + await new_notebook.save() + + return NotebookResponse( + id=new_notebook.id or "", + name=new_notebook.name, + description=new_notebook.description, + archived=new_notebook.archived or False, + created=str(new_notebook.created), + updated=str(new_notebook.updated), + source_count=0, # New notebook has no sources + note_count=0, # New notebook has no notes + ) + except InvalidInputError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error creating notebook: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error creating notebook: {str(e)}" + ) + + +@router.get("/notebooks/{notebook_id}", response_model=NotebookResponse) +async def get_notebook(notebook_id: str): + """Get a specific notebook by ID.""" + try: + # Query with counts for single notebook + query = """ + SELECT *, + count(<-reference.in) as source_count, + count(<-artifact.in) as note_count + FROM $notebook_id + """ + result = await repo_query(query, {"notebook_id": ensure_record_id(notebook_id)}) + + if not result: + raise HTTPException(status_code=404, detail="Notebook not found") + + nb = result[0] + return NotebookResponse( + id=str(nb.get("id", "")), + name=nb.get("name", ""), + description=nb.get("description", ""), + archived=nb.get("archived", False), + created=str(nb.get("created", "")), + updated=str(nb.get("updated", "")), + source_count=nb.get("source_count", 0), + note_count=nb.get("note_count", 0), + ) + except 
HTTPException: + raise + except Exception as e: + logger.error(f"Error fetching notebook {notebook_id}: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error fetching notebook: {str(e)}" + ) + + +@router.put("/notebooks/{notebook_id}", response_model=NotebookResponse) +async def update_notebook(notebook_id: str, notebook_update: NotebookUpdate): + """Update a notebook.""" + try: + notebook = await Notebook.get(notebook_id) + if not notebook: + raise HTTPException(status_code=404, detail="Notebook not found") + + # Update only provided fields + if notebook_update.name is not None: + notebook.name = notebook_update.name + if notebook_update.description is not None: + notebook.description = notebook_update.description + if notebook_update.archived is not None: + notebook.archived = notebook_update.archived + + await notebook.save() + + # Query with counts after update + query = """ + SELECT *, + count(<-reference.in) as source_count, + count(<-artifact.in) as note_count + FROM $notebook_id + """ + result = await repo_query(query, {"notebook_id": ensure_record_id(notebook_id)}) + + if result: + nb = result[0] + return NotebookResponse( + id=str(nb.get("id", "")), + name=nb.get("name", ""), + description=nb.get("description", ""), + archived=nb.get("archived", False), + created=str(nb.get("created", "")), + updated=str(nb.get("updated", "")), + source_count=nb.get("source_count", 0), + note_count=nb.get("note_count", 0), + ) + + # Fallback if query fails + return NotebookResponse( + id=notebook.id or "", + name=notebook.name, + description=notebook.description, + archived=notebook.archived or False, + created=str(notebook.created), + updated=str(notebook.updated), + source_count=0, + note_count=0, + ) + except HTTPException: + raise + except InvalidInputError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error updating notebook {notebook_id}: {str(e)}") + raise HTTPException( + status_code=500, 
detail=f"Error updating notebook: {str(e)}" + ) + + +@router.post("/notebooks/{notebook_id}/sources/{source_id}") +async def add_source_to_notebook(notebook_id: str, source_id: str): + """Add an existing source to a notebook (create the reference).""" + try: + # Check if notebook exists + notebook = await Notebook.get(notebook_id) + if not notebook: + raise HTTPException(status_code=404, detail="Notebook not found") + + # Check if source exists + source = await Source.get(source_id) + if not source: + raise HTTPException(status_code=404, detail="Source not found") + + # Check if reference already exists (idempotency) + existing_ref = await repo_query( + "SELECT * FROM reference WHERE out = $source_id AND in = $notebook_id", + { + "notebook_id": ensure_record_id(notebook_id), + "source_id": ensure_record_id(source_id), + }, + ) + + # If reference doesn't exist, create it + if not existing_ref: + await repo_query( + "RELATE $source_id->reference->$notebook_id", + { + "notebook_id": ensure_record_id(notebook_id), + "source_id": ensure_record_id(source_id), + }, + ) + + return {"message": "Source linked to notebook successfully"} + except HTTPException: + raise + except Exception as e: + logger.error( + f"Error linking source {source_id} to notebook {notebook_id}: {str(e)}" + ) + raise HTTPException( + status_code=500, detail=f"Error linking source to notebook: {str(e)}" + ) + + +@router.delete("/notebooks/{notebook_id}/sources/{source_id}") +async def remove_source_from_notebook(notebook_id: str, source_id: str): + """Remove a source from a notebook (delete the reference).""" + try: + # Check if notebook exists + notebook = await Notebook.get(notebook_id) + if not notebook: + raise HTTPException(status_code=404, detail="Notebook not found") + + # Delete the reference record linking source to notebook + await repo_query( + "DELETE FROM reference WHERE out = $notebook_id AND in = $source_id", + { + "notebook_id": ensure_record_id(notebook_id), + "source_id": 
ensure_record_id(source_id), + }, + ) + + return {"message": "Source removed from notebook successfully"} + except HTTPException: + raise + except Exception as e: + logger.error( + f"Error removing source {source_id} from notebook {notebook_id}: {str(e)}" + ) + raise HTTPException( + status_code=500, detail=f"Error removing source from notebook: {str(e)}" + ) + + +@router.delete("/notebooks/{notebook_id}") +async def delete_notebook(notebook_id: str): + """Delete a notebook.""" + try: + notebook = await Notebook.get(notebook_id) + if not notebook: + raise HTTPException(status_code=404, detail="Notebook not found") + + await notebook.delete() + + return {"message": "Notebook deleted successfully"} + except HTTPException: + raise + except Exception as e: + logger.error(f"Error deleting notebook {notebook_id}: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error deleting notebook: {str(e)}" + ) diff --git a/api/routers/notes.py b/api/routers/notes.py new file mode 100644 index 0000000000000000000000000000000000000000..1eed228a8d6434589fc0270b42ed964b6a001fd6 --- /dev/null +++ b/api/routers/notes.py @@ -0,0 +1,180 @@ +from typing import List, Literal, Optional + +from fastapi import APIRouter, HTTPException, Query +from loguru import logger + +from api.models import NoteCreate, NoteResponse, NoteUpdate +from open_notebook.domain.notebook import Note +from open_notebook.exceptions import InvalidInputError + +router = APIRouter() + + +@router.get("/notes", response_model=List[NoteResponse]) +async def get_notes( + notebook_id: Optional[str] = Query(None, description="Filter by notebook ID") +): + """Get all notes with optional notebook filtering.""" + try: + if notebook_id: + # Get notes for a specific notebook + from open_notebook.domain.notebook import Notebook + notebook = await Notebook.get(notebook_id) + if not notebook: + raise HTTPException(status_code=404, detail="Notebook not found") + notes = await notebook.get_notes() + else: + # Get all notes + 
notes = await Note.get_all(order_by="updated desc") + + return [ + NoteResponse( + id=note.id or "", + title=note.title, + content=note.content, + note_type=note.note_type, + created=str(note.created), + updated=str(note.updated), + ) + for note in notes + ] + except HTTPException: + raise + except Exception as e: + logger.error(f"Error fetching notes: {str(e)}") + raise HTTPException(status_code=500, detail=f"Error fetching notes: {str(e)}") + + +@router.post("/notes", response_model=NoteResponse) +async def create_note(note_data: NoteCreate): + """Create a new note.""" + try: + # Auto-generate title if not provided and it's an AI note + title = note_data.title + if not title and note_data.note_type == "ai" and note_data.content: + from open_notebook.graphs.prompt import graph as prompt_graph + prompt = "Based on the Note below, please provide a Title for this content, with max 15 words" + result = await prompt_graph.ainvoke( + { # type: ignore[arg-type] + "input_text": note_data.content, + "prompt": prompt + } + ) + title = result.get("output", "Untitled Note") + + # Validate note_type + note_type: Optional[Literal["human", "ai"]] = None + if note_data.note_type in ("human", "ai"): + note_type = note_data.note_type # type: ignore[assignment] + elif note_data.note_type is not None: + raise HTTPException(status_code=400, detail="note_type must be 'human' or 'ai'") + + new_note = Note( + title=title, + content=note_data.content, + note_type=note_type, + ) + await new_note.save() + + # Add to notebook if specified + if note_data.notebook_id: + from open_notebook.domain.notebook import Notebook + notebook = await Notebook.get(note_data.notebook_id) + if not notebook: + raise HTTPException(status_code=404, detail="Notebook not found") + await new_note.add_to_notebook(note_data.notebook_id) + + return NoteResponse( + id=new_note.id or "", + title=new_note.title, + content=new_note.content, + note_type=new_note.note_type, + created=str(new_note.created), + 
updated=str(new_note.updated), + ) + except HTTPException: + raise + except InvalidInputError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error creating note: {str(e)}") + raise HTTPException(status_code=500, detail=f"Error creating note: {str(e)}") + + +@router.get("/notes/{note_id}", response_model=NoteResponse) +async def get_note(note_id: str): + """Get a specific note by ID.""" + try: + note = await Note.get(note_id) + if not note: + raise HTTPException(status_code=404, detail="Note not found") + + return NoteResponse( + id=note.id or "", + title=note.title, + content=note.content, + note_type=note.note_type, + created=str(note.created), + updated=str(note.updated), + ) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error fetching note {note_id}: {str(e)}") + raise HTTPException(status_code=500, detail=f"Error fetching note: {str(e)}") + + +@router.put("/notes/{note_id}", response_model=NoteResponse) +async def update_note(note_id: str, note_update: NoteUpdate): + """Update a note.""" + try: + note = await Note.get(note_id) + if not note: + raise HTTPException(status_code=404, detail="Note not found") + + # Update only provided fields + if note_update.title is not None: + note.title = note_update.title + if note_update.content is not None: + note.content = note_update.content + if note_update.note_type is not None: + if note_update.note_type in ("human", "ai"): + note.note_type = note_update.note_type # type: ignore[assignment] + else: + raise HTTPException(status_code=400, detail="note_type must be 'human' or 'ai'") + + await note.save() + + return NoteResponse( + id=note.id or "", + title=note.title, + content=note.content, + note_type=note.note_type, + created=str(note.created), + updated=str(note.updated), + ) + except HTTPException: + raise + except InvalidInputError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + 
logger.error(f"Error updating note {note_id}: {str(e)}") + raise HTTPException(status_code=500, detail=f"Error updating note: {str(e)}") + + +@router.delete("/notes/{note_id}") +async def delete_note(note_id: str): + """Delete a note.""" + try: + note = await Note.get(note_id) + if not note: + raise HTTPException(status_code=404, detail="Note not found") + + await note.delete() + + return {"message": "Note deleted successfully"} + except HTTPException: + raise + except Exception as e: + logger.error(f"Error deleting note {note_id}: {str(e)}") + raise HTTPException(status_code=500, detail=f"Error deleting note: {str(e)}") \ No newline at end of file diff --git a/api/routers/ocr.py b/api/routers/ocr.py new file mode 100644 index 0000000000000000000000000000000000000000..9ea05ff6d8c1c2a595879746af6943f5775658eb --- /dev/null +++ b/api/routers/ocr.py @@ -0,0 +1,131 @@ +""" +OCR API Router + +Endpoints for image processing and text extraction. +""" + +from typing import Optional +from fastapi import APIRouter, HTTPException, UploadFile, File, Form +from pydantic import BaseModel +from loguru import logger + +from open_notebook.services.ocr_service import ocr_service, OCRResult, StructuredNote + + +router = APIRouter(prefix="/ocr", tags=["ocr"]) + + +# ============================================================================ +# Request/Response Models +# ============================================================================ + +class OCRBase64Request(BaseModel): + """Request for OCR with base64 encoded image.""" + image: str # Base64 encoded image + structure: bool = True # Whether to structure the result + + +class OCRResponse(BaseModel): + """Response from OCR processing.""" + raw_text: str + confidence: Optional[float] = None + processing_time_ms: int + source_format: str + structured: Optional[StructuredNote] = None + + +# ============================================================================ +# Endpoints +# 
============================================================================ + +@router.post("/process", response_model=OCRResponse) +async def process_image(request: OCRBase64Request): + """ + Process a base64 encoded image and extract text using OCR. + + Supports PNG, JPEG, and other common image formats. + Optionally structures the extracted text using LLM. + """ + try: + # Run OCR + result = ocr_service.process_image_base64(request.image) + + # Structure if requested + structured = None + if request.structure and result.raw_text: + try: + structured = await ocr_service.structure_text(result.raw_text) + except Exception as e: + logger.warning(f"Failed to structure OCR text: {e}") + + return OCRResponse( + raw_text=result.raw_text, + confidence=result.confidence, + processing_time_ms=result.processing_time_ms, + source_format=result.source_format, + structured=structured, + ) + + except RuntimeError as e: + raise HTTPException(status_code=503, detail=str(e)) + except Exception as e: + logger.error(f"OCR processing failed: {e}") + raise HTTPException(status_code=500, detail=f"OCR processing failed: {str(e)}") + + +@router.post("/upload", response_model=OCRResponse) +async def process_uploaded_image( + file: UploadFile = File(...), + structure: bool = Form(True), +): + """ + Process an uploaded image file and extract text using OCR. + + Supports PNG, JPEG, and other common image formats. + """ + # Validate file type + allowed_types = ["image/png", "image/jpeg", "image/jpg", "image/gif", "image/webp", "image/bmp"] + if file.content_type not in allowed_types: + raise HTTPException( + status_code=400, + detail=f"Invalid file type. 
Allowed types: {', '.join(allowed_types)}" + ) + + try: + # Read file + contents = await file.read() + + # Run OCR + result = ocr_service.process_image_bytes(contents) + + # Structure if requested + structured = None + if structure and result.raw_text: + try: + structured = await ocr_service.structure_text(result.raw_text) + except Exception as e: + logger.warning(f"Failed to structure OCR text: {e}") + + return OCRResponse( + raw_text=result.raw_text, + confidence=result.confidence, + processing_time_ms=result.processing_time_ms, + source_format=result.source_format, + structured=structured, + ) + + except RuntimeError as e: + raise HTTPException(status_code=503, detail=str(e)) + except Exception as e: + logger.error(f"OCR processing failed: {e}") + raise HTTPException(status_code=500, detail=f"OCR processing failed: {str(e)}") + + +@router.get("/status") +async def get_ocr_status(): + """Check if OCR service is available.""" + return { + "available": ocr_service.tesseract_available, + "message": "OCR service is ready" if ocr_service.tesseract_available + else "Tesseract is not installed. OCR functionality is unavailable." 
+ } diff --git a/api/routers/podcasts.py b/api/routers/podcasts.py new file mode 100644 index 0000000000000000000000000000000000000000..9f833bca8da7613d6fe34ebaaef466a1340245d7 --- /dev/null +++ b/api/routers/podcasts.py @@ -0,0 +1,231 @@ +from pathlib import Path +from typing import List, Optional +from urllib.parse import unquote, urlparse + +from fastapi import APIRouter, HTTPException +from fastapi.responses import FileResponse +from loguru import logger +from pydantic import BaseModel + +from api.podcast_service import ( + PodcastGenerationRequest, + PodcastGenerationResponse, + PodcastService, +) + +router = APIRouter() + + +class PodcastEpisodeResponse(BaseModel): + id: str + name: str + episode_profile: dict + speaker_profile: dict + briefing: str + audio_file: Optional[str] = None + audio_url: Optional[str] = None + transcript: Optional[dict] = None + outline: Optional[dict] = None + created: Optional[str] = None + job_status: Optional[str] = None + + +def _resolve_audio_path(audio_file: str) -> Path: + if audio_file.startswith("file://"): + parsed = urlparse(audio_file) + return Path(unquote(parsed.path)) + return Path(audio_file) + + +@router.post("/podcasts/generate", response_model=PodcastGenerationResponse) +async def generate_podcast(request: PodcastGenerationRequest): + """ + Generate a podcast episode using Episode Profiles. + Returns immediately with job ID for status tracking. 
+ """ + try: + job_id = await PodcastService.submit_generation_job( + episode_profile_name=request.episode_profile, + speaker_profile_name=request.speaker_profile, + episode_name=request.episode_name, + notebook_id=request.notebook_id, + content=request.content, + briefing_suffix=request.briefing_suffix, + ) + + return PodcastGenerationResponse( + job_id=job_id, + status="submitted", + message=f"Podcast generation started for episode '{request.episode_name}'", + episode_profile=request.episode_profile, + episode_name=request.episode_name, + ) + + except Exception as e: + logger.error(f"Error generating podcast: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Failed to generate podcast: {str(e)}" + ) + + +@router.get("/podcasts/jobs/{job_id}") +async def get_podcast_job_status(job_id: str): + """Get the status of a podcast generation job""" + try: + status_data = await PodcastService.get_job_status(job_id) + return status_data + + except Exception as e: + logger.error(f"Error fetching podcast job status: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Failed to fetch job status: {str(e)}" + ) + + +@router.get("/podcasts/episodes", response_model=List[PodcastEpisodeResponse]) +async def list_podcast_episodes(): + """List all podcast episodes""" + try: + episodes = await PodcastService.list_episodes() + + response_episodes = [] + for episode in episodes: + # Skip incomplete episodes without command or audio + if not episode.command and not episode.audio_file: + continue + + # Get job status if available + job_status = None + if episode.command: + try: + job_status = await episode.get_job_status() + except Exception: + job_status = "unknown" + else: + # No command but has audio file = completed import + job_status = "completed" + + audio_url = None + if episode.audio_file: + audio_path = _resolve_audio_path(episode.audio_file) + if audio_path.exists(): + audio_url = f"/api/podcasts/episodes/{episode.id}/audio" + + response_episodes.append( + 
PodcastEpisodeResponse( + id=str(episode.id), + name=episode.name, + episode_profile=episode.episode_profile, + speaker_profile=episode.speaker_profile, + briefing=episode.briefing, + audio_file=episode.audio_file, + audio_url=audio_url, + transcript=episode.transcript, + outline=episode.outline, + created=str(episode.created) if episode.created else None, + job_status=job_status, + ) + ) + + return response_episodes + + except Exception as e: + logger.error(f"Error listing podcast episodes: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Failed to list podcast episodes: {str(e)}" + ) + + +@router.get("/podcasts/episodes/{episode_id}", response_model=PodcastEpisodeResponse) +async def get_podcast_episode(episode_id: str): + """Get a specific podcast episode""" + try: + episode = await PodcastService.get_episode(episode_id) + + # Get job status if available + job_status = None + if episode.command: + try: + job_status = await episode.get_job_status() + except Exception: + job_status = "unknown" + else: + # No command but has audio file = completed import + job_status = "completed" if episode.audio_file else "unknown" + + audio_url = None + if episode.audio_file: + audio_path = _resolve_audio_path(episode.audio_file) + if audio_path.exists(): + audio_url = f"/api/podcasts/episodes/{episode.id}/audio" + + return PodcastEpisodeResponse( + id=str(episode.id), + name=episode.name, + episode_profile=episode.episode_profile, + speaker_profile=episode.speaker_profile, + briefing=episode.briefing, + audio_file=episode.audio_file, + audio_url=audio_url, + transcript=episode.transcript, + outline=episode.outline, + created=str(episode.created) if episode.created else None, + job_status=job_status, + ) + + except Exception as e: + logger.error(f"Error fetching podcast episode: {str(e)}") + raise HTTPException(status_code=404, detail=f"Episode not found: {str(e)}") + + +@router.get("/podcasts/episodes/{episode_id}/audio") +async def 
stream_podcast_episode_audio(episode_id: str): + """Stream the audio file associated with a podcast episode""" + try: + episode = await PodcastService.get_episode(episode_id) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error fetching podcast episode for audio: {str(e)}") + raise HTTPException(status_code=404, detail=f"Episode not found: {str(e)}") + + if not episode.audio_file: + raise HTTPException(status_code=404, detail="Episode has no audio file") + + audio_path = _resolve_audio_path(episode.audio_file) + if not audio_path.exists(): + raise HTTPException(status_code=404, detail="Audio file not found on disk") + + return FileResponse( + audio_path, + media_type="audio/mpeg", + filename=audio_path.name, + ) + + +@router.delete("/podcasts/episodes/{episode_id}") +async def delete_podcast_episode(episode_id: str): + """Delete a podcast episode and its associated audio file""" + try: + # Get the episode first to check if it exists and get the audio file path + episode = await PodcastService.get_episode(episode_id) + + # Delete the physical audio file if it exists + if episode.audio_file: + audio_path = _resolve_audio_path(episode.audio_file) + if audio_path.exists(): + try: + audio_path.unlink() + logger.info(f"Deleted audio file: {audio_path}") + except Exception as e: + logger.warning(f"Failed to delete audio file {audio_path}: {e}") + + # Delete the episode from the database + await episode.delete() + + logger.info(f"Deleted podcast episode: {episode_id}") + return {"message": "Episode deleted successfully", "episode_id": episode_id} + + except Exception as e: + logger.error(f"Error deleting podcast episode: {str(e)}") + raise HTTPException(status_code=500, detail=f"Failed to delete episode: {str(e)}") diff --git a/api/routers/quiz.py b/api/routers/quiz.py new file mode 100644 index 0000000000000000000000000000000000000000..c543482ce6fc6977dd086167eb2827ce0b9d0acf --- /dev/null +++ b/api/routers/quiz.py @@ -0,0 +1,511 @@ +""" +Quiz 
and Flashcard API Router +""" + +from datetime import datetime +from typing import List, Literal, Optional + +from fastapi import APIRouter, HTTPException +from fsrs import Rating +from loguru import logger +from pydantic import BaseModel, Field + +from open_notebook.domain.quiz import ( + Flashcard, + QuizQuestion, + QuizSession, + UserStudyStats, +) +from open_notebook.services.quiz_service import QuizGenerationService + + +router = APIRouter(prefix="/quiz", tags=["quiz"]) + + +# ==================== Request/Response Models ==================== + +class QuizGenerateRequest(BaseModel): + notebook_id: str = Field(..., description="ID of the notebook to generate quiz from") + num_questions: int = Field(default=10, ge=1, le=50, description="Number of questions") + difficulty: Literal["easy", "medium", "hard", "mixed"] = Field( + default="mixed", description="Quiz difficulty" + ) + source_ids: Optional[List[str]] = Field( + default=None, description="Specific source IDs to use (optional)" + ) + model_id: Optional[str] = Field( + default=None, description="Model ID to use for generation" + ) + + +class QuizQuestionResponse(BaseModel): + id: str + question: str + question_type: str + options: List[str] + difficulty: str + user_answer: Optional[int] = None + is_correct: Optional[bool] = None + # Only include after answering + correct_index: Optional[int] = None + explanation: Optional[str] = None + + +class QuizSessionResponse(BaseModel): + id: str + notebook_id: str + title: Optional[str] + question_count: int + correct_count: int + score: Optional[float] + difficulty: str + status: str + started_at: Optional[str] + completed_at: Optional[str] + created: str + + +class QuizSessionDetailResponse(QuizSessionResponse): + questions: List[QuizQuestionResponse] + + +class SubmitAnswerRequest(BaseModel): + question_id: str + answer: int = Field(..., ge=0, le=3, description="Index of selected answer (0-3)") + time_spent_seconds: Optional[int] = Field( + default=None, 
description="Time spent on question in seconds" + ) + + +class SubmitAnswerResponse(BaseModel): + is_correct: bool + correct_index: int + explanation: str + session_progress: dict + + +# ==================== Flashcard Models ==================== + +class FlashcardCreateRequest(BaseModel): + notebook_id: str + front: str = Field(..., min_length=1, description="Front of card (question)") + back: str = Field(..., min_length=1, description="Back of card (answer)") + source_id: Optional[str] = None + tags: Optional[List[str]] = Field(default_factory=list) + + +class FlashcardGenerateRequest(BaseModel): + notebook_id: str + num_cards: int = Field(default=20, ge=1, le=100, description="Number of cards to generate") + source_ids: Optional[List[str]] = None + model_id: Optional[str] = None + + +class FlashcardResponse(BaseModel): + id: str + front: str + back: str + tags: List[str] + difficulty: float + state: int + due: Optional[str] + reps: int + created: str + + +class FlashcardReviewRequest(BaseModel): + rating: int = Field( + ..., ge=1, le=4, + description="Review rating: 1=Again, 2=Hard, 3=Good, 4=Easy" + ) + + +class FlashcardStatsResponse(BaseModel): + total: int + new: int + learning: int + review: int + due: int + + +# ==================== Study Stats Models ==================== + +class StudyStatsResponse(BaseModel): + user_id: str + current_streak: int + longest_streak: int + total_xp: int + level: int + badges: List[str] + total_quizzes_completed: int + total_flashcards_reviewed: int + total_correct_answers: int + xp_to_next_level: int + + +# ==================== Quiz Endpoints ==================== + +@router.post("/generate", response_model=QuizSessionResponse) +async def generate_quiz(request: QuizGenerateRequest): + """Generate a new quiz from notebook content""" + try: + session = await QuizGenerationService.generate_quiz( + notebook_id=request.notebook_id, + num_questions=request.num_questions, + difficulty=request.difficulty, + 
source_ids=request.source_ids, + model_id=request.model_id + ) + + return QuizSessionResponse( + id=session.id, + notebook_id=session.notebook_id, + title=session.title, + question_count=session.question_count, + correct_count=session.correct_count, + score=session.score, + difficulty=session.difficulty, + status=session.status, + started_at=session.started_at.isoformat() if session.started_at else None, + completed_at=session.completed_at.isoformat() if session.completed_at else None, + created=session.created.isoformat() if session.created else "" + ) + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error generating quiz: {str(e)}") + logger.exception(e) # This will log the full traceback + raise HTTPException(status_code=500, detail=f"Failed to generate quiz: {str(e)}") + + +@router.get("/sessions", response_model=List[QuizSessionResponse]) +async def get_quiz_sessions( + notebook_id: str, + limit: int = 20 +): + """Get quiz sessions for a notebook""" + try: + sessions = await QuizSession.get_by_notebook(notebook_id, limit) + return [ + QuizSessionResponse( + id=s.id, + notebook_id=s.notebook_id, + title=s.title, + question_count=s.question_count, + correct_count=s.correct_count, + score=s.score, + difficulty=s.difficulty, + status=s.status, + started_at=s.started_at.isoformat() if s.started_at else None, + completed_at=s.completed_at.isoformat() if s.completed_at else None, + created=s.created.isoformat() if s.created else "" + ) + for s in sessions + ] + except Exception as e: + logger.error(f"Error fetching quiz sessions: {str(e)}") + raise HTTPException(status_code=500, detail="Failed to fetch quiz sessions") + + +@router.get("/sessions/{session_id}", response_model=QuizSessionDetailResponse) +async def get_quiz_session(session_id: str, show_answers: bool = False): + """Get a quiz session with questions""" + try: + session = await QuizSession.get(session_id) + questions = await 
session.get_questions() + + question_responses = [] + for q in questions: + resp = QuizQuestionResponse( + id=q.id, + question=q.question, + question_type=q.question_type, + options=q.options, + difficulty=q.difficulty, + user_answer=q.user_answer, + is_correct=q.is_correct + ) + # Include answers if already answered or show_answers is True + if q.user_answer is not None or show_answers or session.status == "completed": + resp.correct_index = q.correct_index + resp.explanation = q.explanation + question_responses.append(resp) + + return QuizSessionDetailResponse( + id=session.id, + notebook_id=session.notebook_id, + title=session.title, + question_count=session.question_count, + correct_count=session.correct_count, + score=session.score, + difficulty=session.difficulty, + status=session.status, + started_at=session.started_at.isoformat() if session.started_at else None, + completed_at=session.completed_at.isoformat() if session.completed_at else None, + created=session.created.isoformat() if session.created else "", + questions=question_responses + ) + except Exception as e: + logger.error(f"Error fetching quiz session: {str(e)}") + raise HTTPException(status_code=404, detail="Quiz session not found") + + +@router.post("/sessions/{session_id}/answer", response_model=SubmitAnswerResponse) +async def submit_answer(session_id: str, request: SubmitAnswerRequest): + """Submit an answer for a quiz question""" + try: + session = await QuizSession.get(session_id) + + if session.status != "in_progress": + raise HTTPException(status_code=400, detail="Quiz is not in progress") + + question = await session.submit_answer( + question_id=request.question_id, + answer=request.answer, + time_spent_seconds=request.time_spent_seconds + ) + + # Check if all questions answered + questions = await session.get_questions() + answered_count = sum(1 for q in questions if q.user_answer is not None) + + return SubmitAnswerResponse( + is_correct=question.is_correct, + 
correct_index=question.correct_index, + explanation=question.explanation, + session_progress={ + "answered": answered_count, + "total": session.question_count, + "correct": session.correct_count + } + ) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error submitting answer: {str(e)}") + raise HTTPException(status_code=500, detail="Failed to submit answer") + + +@router.post("/sessions/{session_id}/complete", response_model=QuizSessionResponse) +async def complete_quiz(session_id: str): + """Complete a quiz session and calculate final score""" + try: + session = await QuizSession.get(session_id) + session = await session.complete() + + # Update user stats (use default user for now) + stats = await UserStudyStats.get_or_create("default_user") + perfect = session.score == 100.0 + await stats.record_quiz_completion(session.score, perfect) + + return QuizSessionResponse( + id=session.id, + notebook_id=session.notebook_id, + title=session.title, + question_count=session.question_count, + correct_count=session.correct_count, + score=session.score, + difficulty=session.difficulty, + status=session.status, + started_at=session.started_at.isoformat() if session.started_at else None, + completed_at=session.completed_at.isoformat() if session.completed_at else None, + created=session.created.isoformat() if session.created else "" + ) + except Exception as e: + logger.error(f"Error completing quiz: {str(e)}") + raise HTTPException(status_code=500, detail="Failed to complete quiz") + + +# ==================== Flashcard Endpoints ==================== + +@router.post("/flashcards", response_model=FlashcardResponse) +async def create_flashcard(request: FlashcardCreateRequest): + """Create a new flashcard""" + try: + flashcard = Flashcard( + notebook_id=request.notebook_id, + source_id=request.source_id, + front=request.front, + back=request.back, + tags=request.tags or [] + ) + await flashcard.save() + + # Award XP for creating flashcard + stats = await 
UserStudyStats.get_or_create("default_user") + await stats.add_xp(stats.XP_CREATE_FLASHCARD, "Created flashcard") + + return FlashcardResponse( + id=flashcard.id, + front=flashcard.front, + back=flashcard.back, + tags=flashcard.tags or [], + difficulty=flashcard.difficulty, + state=flashcard.state, + due=flashcard.due.isoformat() if flashcard.due else None, + reps=flashcard.reps, + created=flashcard.created.isoformat() if flashcard.created else "" + ) + except Exception as e: + logger.error(f"Error creating flashcard: {str(e)}") + raise HTTPException(status_code=500, detail="Failed to create flashcard") + + +@router.post("/flashcards/generate", response_model=List[FlashcardResponse]) +async def generate_flashcards(request: FlashcardGenerateRequest): + """Generate flashcards from notebook content using AI""" + try: + flashcards = await QuizGenerationService.generate_flashcards( + notebook_id=request.notebook_id, + num_cards=request.num_cards, + source_ids=request.source_ids, + model_id=request.model_id + ) + + return [ + FlashcardResponse( + id=f.id, + front=f.front, + back=f.back, + tags=f.tags or [], + difficulty=f.difficulty, + state=f.state, + due=f.due.isoformat() if f.due else None, + reps=f.reps, + created=f.created.isoformat() if f.created else "" + ) + for f in flashcards + ] + except ValueError as e: + logger.error(f"ValueError generating flashcards: {str(e)}") + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error generating flashcards: {str(e)}") + logger.exception(e) # Log full traceback + raise HTTPException(status_code=500, detail=f"Failed to generate flashcards: {str(e)}") + + +@router.get("/flashcards", response_model=List[FlashcardResponse]) +async def get_flashcards( + notebook_id: Optional[str] = None, + due_only: bool = False, + limit: int = 100 +): + """Get flashcards, optionally filtered by notebook or due status""" + try: + if due_only: + flashcards = await Flashcard.get_due_cards(notebook_id, 
limit) + elif notebook_id: + flashcards = await Flashcard.get_by_notebook(notebook_id, limit) + else: + flashcards = await Flashcard.get_all() + + return [ + FlashcardResponse( + id=f.id, + front=f.front, + back=f.back, + tags=f.tags or [], + difficulty=f.difficulty, + state=f.state, + due=f.due.isoformat() if f.due else None, + reps=f.reps, + created=f.created.isoformat() if f.created else "" + ) + for f in flashcards + ] + except Exception as e: + logger.error(f"Error fetching flashcards: {str(e)}") + raise HTTPException(status_code=500, detail="Failed to fetch flashcards") + + +@router.get("/flashcards/stats", response_model=FlashcardStatsResponse) +async def get_flashcard_stats(notebook_id: Optional[str] = None): + """Get flashcard statistics""" + try: + stats = await Flashcard.get_stats(notebook_id) + return FlashcardStatsResponse(**stats) + except Exception as e: + logger.error(f"Error fetching flashcard stats: {str(e)}") + raise HTTPException(status_code=500, detail="Failed to fetch stats") + + +@router.post("/flashcards/{flashcard_id}/review", response_model=FlashcardResponse) +async def review_flashcard(flashcard_id: str, request: FlashcardReviewRequest): + """Review a flashcard and update FSRS scheduling""" + try: + logger.info(f"Reviewing flashcard {flashcard_id} with rating {request.rating}") + flashcard = await Flashcard.get(flashcard_id) + logger.info(f"Found flashcard: {flashcard.front[:50]}") + + # Map rating int to FSRS Rating enum + rating_map = { + 1: Rating.Again, + 2: Rating.Hard, + 3: Rating.Good, + 4: Rating.Easy + } + rating = rating_map.get(request.rating, Rating.Good) + logger.info(f"Mapped rating to FSRS: {rating}") + + flashcard = await flashcard.review(rating) + logger.info(f"Flashcard reviewed successfully, new due: {flashcard.due}") + + # Update user stats + stats = await UserStudyStats.get_or_create("default_user") + await stats.record_flashcard_review(correct=request.rating >= 3) + logger.info(f"User stats updated") + + return 
FlashcardResponse( + id=flashcard.id, + front=flashcard.front, + back=flashcard.back, + tags=flashcard.tags or [], + difficulty=flashcard.difficulty if flashcard.difficulty is not None else 0.0, + state=flashcard.state, + due=flashcard.due.isoformat() if flashcard.due else None, + reps=flashcard.reps if hasattr(flashcard, 'reps') and flashcard.reps else 0, + created=flashcard.created.isoformat() if flashcard.created else "" + ) + except Exception as e: + logger.error(f"Error reviewing flashcard: {str(e)}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Failed to review flashcard: {str(e)}") + + +@router.delete("/flashcards/{flashcard_id}") +async def delete_flashcard(flashcard_id: str): + """Delete a flashcard""" + try: + flashcard = await Flashcard.get(flashcard_id) + await flashcard.delete() + return {"message": "Flashcard deleted"} + except Exception as e: + logger.error(f"Error deleting flashcard: {str(e)}") + raise HTTPException(status_code=500, detail="Failed to delete flashcard") + + +# ==================== Study Stats Endpoints ==================== + +@router.get("/stats", response_model=StudyStatsResponse) +async def get_study_stats(user_id: str = "default_user"): + """Get user study statistics""" + try: + stats = await UserStudyStats.get_or_create(user_id) + + xp_to_next = stats.XP_PER_LEVEL - (stats.total_xp % stats.XP_PER_LEVEL) + + return StudyStatsResponse( + user_id=stats.user_id, + current_streak=stats.current_streak, + longest_streak=stats.longest_streak, + total_xp=stats.total_xp, + level=stats.level, + badges=stats.badges, + total_quizzes_completed=stats.total_quizzes_completed, + total_flashcards_reviewed=stats.total_flashcards_reviewed, + total_correct_answers=stats.total_correct_answers, + xp_to_next_level=xp_to_next + ) + except Exception as e: + logger.error(f"Error fetching study stats: {str(e)}") + raise HTTPException(status_code=500, detail="Failed to fetch study stats") diff --git a/api/routers/research.py 
b/api/routers/research.py new file mode 100644 index 0000000000000000000000000000000000000000..71489f3b2d213c998bb88cab3f6881445ddf9f99 --- /dev/null +++ b/api/routers/research.py @@ -0,0 +1,294 @@ +""" +Research API Router + +Provides endpoints for the multi-agent research pipeline. +""" + +from typing import List, Optional +from datetime import datetime +import uuid + +from fastapi import APIRouter, HTTPException, BackgroundTasks +from pydantic import BaseModel, Field +from loguru import logger + +from open_notebook.graphs.research import run_research, research_graph + + +router = APIRouter(prefix="/research", tags=["research"]) + + +# ============================================================================ +# Request/Response Models +# ============================================================================ + +class ResearchRequest(BaseModel): + """Request to start a research task""" + query: str = Field(..., description="The research question or topic") + source_ids: Optional[List[str]] = Field(default=None, description="Specific source IDs to use") + research_type: Optional[str] = Field(default=None, description="Override research type detection") + llm_config: Optional[dict] = Field(default=None, description="Model configuration overrides") + + +class ResearchProgress(BaseModel): + """Progress update for a research task""" + task_id: str + status: str # pending, routing, researching, fact_checking, synthesizing, reporting, completed, error + current_step: str + progress_percent: int + message: str + started_at: datetime + updated_at: datetime + + +class Citation(BaseModel): + """A citation from the research""" + source_id: str + title: str + quote: Optional[str] = None + + +class ResearchResult(BaseModel): + """The result of a research task""" + task_id: str + query: str + research_type: str + scholar_findings: str + fact_check_results: str + synthesis: str + final_report: str + citations: List[Citation] + metadata: dict + created_at: datetime + 
completed_at: Optional[datetime] = None + + +class ResearchSummary(BaseModel): + """Summary of a research result for listing""" + task_id: str + query: str + research_type: str + status: str + created_at: datetime + completed_at: Optional[datetime] = None + + +# ============================================================================ +# In-memory storage (replace with database in production) +# ============================================================================ + +_research_tasks: dict = {} +_research_results: dict = {} + + +# ============================================================================ +# Endpoints +# ============================================================================ + +@router.post("/start", response_model=ResearchProgress) +async def start_research(request: ResearchRequest, background_tasks: BackgroundTasks): + """ + Start a new research task. + The research runs asynchronously and progress can be checked via the status endpoint. + """ + task_id = str(uuid.uuid4()) + now = datetime.now() + + progress = ResearchProgress( + task_id=task_id, + status="pending", + current_step="Initializing research pipeline", + progress_percent=0, + message="Research task queued", + started_at=now, + updated_at=now + ) + + _research_tasks[task_id] = progress + + # Run research in background + background_tasks.add_task(execute_research, task_id, request) + + logger.info(f"Started research task {task_id} for query: {request.query[:100]}...") + + return progress + + +async def execute_research(task_id: str, request: ResearchRequest): + """Execute the research pipeline (runs in background)""" + try: + # Update status + _research_tasks[task_id].status = "routing" + _research_tasks[task_id].current_step = "Analyzing query and determining research approach" + _research_tasks[task_id].progress_percent = 10 + _research_tasks[task_id].updated_at = datetime.now() + + # Build config + config = request.llm_config or {} + + # Add timeout protection (5 
minutes for async research) + import asyncio + try: + result = await asyncio.wait_for( + run_research(request.query, config), + timeout=300.0 # 5 minutes + ) + except asyncio.TimeoutError: + logger.error(f"Research task {task_id} timed out after 300 seconds") + _research_tasks[task_id].status = "error" + _research_tasks[task_id].message = "Research timed out. Please try a more specific query." + _research_tasks[task_id].updated_at = datetime.now() + return + + # Update progress through stages + _research_tasks[task_id].status = "completed" + _research_tasks[task_id].current_step = "Research complete" + _research_tasks[task_id].progress_percent = 100 + _research_tasks[task_id].message = "Research completed successfully" + _research_tasks[task_id].updated_at = datetime.now() + + # Store result + citations = [ + Citation( + source_id=c.get("source_id", "") or "", + title=c.get("title", "") or "Untitled" + ) + for c in result.get("citations", []) + ] + + _research_results[task_id] = ResearchResult( + task_id=task_id, + query=request.query, + research_type=result.get("research_type", "deep_dive"), + scholar_findings=result.get("scholar_findings", ""), + fact_check_results=result.get("fact_check_results", ""), + synthesis=result.get("synthesis", ""), + final_report=result.get("final_report", ""), + citations=citations, + metadata=result.get("metadata", {}), + created_at=_research_tasks[task_id].started_at, + completed_at=datetime.now() + ) + + logger.info(f"Research task {task_id} completed successfully") + + except Exception as e: + logger.error(f"Research task {task_id} failed: {str(e)}") + logger.exception(e) + _research_tasks[task_id].status = "error" + _research_tasks[task_id].message = f"Research failed: {str(e)}" + _research_tasks[task_id].updated_at = datetime.now() + + +@router.get("/status/{task_id}", response_model=ResearchProgress) +async def get_research_status(task_id: str): + """Get the current status of a research task""" + if task_id not in 
_research_tasks: + raise HTTPException(status_code=404, detail="Research task not found") + + return _research_tasks[task_id] + + +@router.get("/result/{task_id}", response_model=ResearchResult) +async def get_research_result(task_id: str): + """Get the result of a completed research task""" + if task_id not in _research_results: + if task_id in _research_tasks: + status = _research_tasks[task_id].status + if status != "completed": + raise HTTPException( + status_code=202, + detail=f"Research still in progress. Current status: {status}" + ) + raise HTTPException(status_code=404, detail="Research result not found") + + return _research_results[task_id] + + +@router.get("/history", response_model=List[ResearchSummary]) +async def get_research_history(limit: int = 20, offset: int = 0): + """Get history of research tasks""" + summaries = [] + + for task_id, progress in list(_research_tasks.items())[offset:offset + limit]: + result = _research_results.get(task_id) + summaries.append(ResearchSummary( + task_id=task_id, + query=result.query if result else "Unknown", + research_type=result.research_type if result else "unknown", + status=progress.status, + created_at=progress.started_at, + completed_at=result.completed_at if result else None + )) + + return summaries + + +@router.post("/quick", response_model=ResearchResult) +async def quick_research(request: ResearchRequest): + """ + Run a synchronous research task and return results immediately. + Use for shorter queries where waiting is acceptable. 
+ """ + task_id = str(uuid.uuid4()) + now = datetime.now() + + logger.info(f"Running quick research for query: {request.query[:100]}...") + + try: + config = request.llm_config or {} + + # Add timeout protection (2 minutes for quick research) + import asyncio + try: + result = await asyncio.wait_for( + run_research(request.query, config), + timeout=120.0 # 2 minutes + ) + except asyncio.TimeoutError: + logger.error(f"Quick research timed out after 120 seconds") + raise HTTPException( + status_code=408, + detail="Research took too long to complete. Please try a more specific query or use the async endpoint." + ) + + citations = [ + Citation( + source_id=c.get("source_id", "") or "", + title=c.get("title", "") or "Untitled" + ) + for c in result.get("citations", []) + ] + + return ResearchResult( + task_id=task_id, + query=request.query, + research_type=result.get("research_type", "deep_dive"), + scholar_findings=result.get("scholar_findings", ""), + fact_check_results=result.get("fact_check_results", ""), + synthesis=result.get("synthesis", ""), + final_report=result.get("final_report", ""), + citations=citations, + metadata=result.get("metadata", {}), + created_at=now, + completed_at=datetime.now() + ) + except HTTPException: + raise + except Exception as e: + logger.error(f"Quick research failed: {str(e)}") + logger.exception(e) + raise HTTPException(status_code=500, detail=f"Research failed: {str(e)}") + + +@router.delete("/{task_id}") +async def delete_research(task_id: str): + """Delete a research task and its results""" + if task_id not in _research_tasks: + raise HTTPException(status_code=404, detail="Research task not found") + + del _research_tasks[task_id] + if task_id in _research_results: + del _research_results[task_id] + + return {"status": "deleted", "task_id": task_id} diff --git a/api/routers/search.py b/api/routers/search.py new file mode 100644 index 0000000000000000000000000000000000000000..2f39c945160907db3f43cabd4bcc6d18802d3bf7 --- /dev/null 
# api/routers/search.py
#
# Knowledge-base search endpoints plus the streaming and non-streaming "ask"
# endpoints that drive the LangGraph ask pipeline.
import json
from typing import AsyncGenerator

from fastapi import APIRouter, HTTPException
from fastapi.responses import StreamingResponse
from loguru import logger

from api.models import AskRequest, AskResponse, DirectAskRequest, SearchRequest, SearchResponse
from open_notebook.domain.models import Model, model_manager
from open_notebook.domain.notebook import text_search, vector_search
from open_notebook.exceptions import DatabaseOperationError, InvalidInputError
from open_notebook.graphs.ask import graph as ask_graph

router = APIRouter()


async def _require_embedding_model(feature: str) -> None:
    """Raise a 400 if no embedding model is configured.

    Args:
        feature: Human-readable feature name used in the error message,
            e.g. "Vector search" or "Ask feature".

    Raises:
        HTTPException: 400 when ``model_manager`` has no embedding model.
    """
    if not await model_manager.get_embedding_model():
        raise HTTPException(
            status_code=400,
            detail=f"{feature} requires an embedding model. Please configure one in the Models section.",
        )


async def _get_required_model(model_id: str, label: str) -> Model:
    """Fetch a model by id or raise a 400 naming the missing model.

    Args:
        model_id: Record id of the model to load.
        label: Capitalized role label for the error message,
            e.g. "Strategy", "Answer", "Final answer".

    Raises:
        HTTPException: 400 when the model does not exist.
    """
    model = await Model.get(model_id)
    if not model:
        raise HTTPException(
            status_code=400,
            detail=f"{label} model {model_id} not found",
        )
    return model


@router.post("/search", response_model=SearchResponse)
async def search_knowledge_base(search_request: SearchRequest):
    """Search the knowledge base using text or vector search.

    Vector search additionally requires an embedding model to be configured;
    text search has no model requirement.
    """
    try:
        if search_request.type == "vector":
            await _require_embedding_model("Vector search")
            results = await vector_search(
                keyword=search_request.query,
                results=search_request.limit,
                source=search_request.search_sources,
                note=search_request.search_notes,
                minimum_score=search_request.minimum_score,
            )
        else:
            results = await text_search(
                keyword=search_request.query,
                results=search_request.limit,
                source=search_request.search_sources,
                note=search_request.search_notes,
            )

        return SearchResponse(
            results=results or [],
            total_count=len(results) if results else 0,
            search_type=search_request.type,
        )

    except HTTPException:
        # Bug fix: without this clause the deliberate 400 raised for a missing
        # embedding model was caught by the generic handler below and
        # re-reported as a 500.
        raise
    except InvalidInputError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except DatabaseOperationError as e:
        logger.error(f"Database error during search: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Search failed: {str(e)}")
    except Exception as e:
        logger.error(f"Unexpected error during search: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Search failed: {str(e)}")


async def stream_ask_response(
    question: str, strategy_model: Model, answer_model: Model, final_answer_model: Model
) -> AsyncGenerator[str, None]:
    """Stream an ask-graph run as Server-Sent Events.

    Emits one ``data:`` frame per graph update:
      - "strategy": the planner's reasoning and the searches it chose
      - "answer": each intermediate answer produced while searching
      - "final_answer": the synthesized final answer
      - "complete": terminal frame repeating the final answer
      - "error": emitted in-band instead if the graph raises (the HTTP
        response has already started streaming at that point).
    """
    try:
        final_answer = None

        async for chunk in ask_graph.astream(
            input=dict(question=question),  # type: ignore[arg-type]
            config=dict(
                configurable=dict(
                    strategy_model=strategy_model.id,
                    answer_model=answer_model.id,
                    final_answer_model=final_answer_model.id,
                )
            ),
            stream_mode="updates",
        ):
            if "agent" in chunk:
                strategy_data = {
                    "type": "strategy",
                    "reasoning": chunk["agent"]["strategy"].reasoning,
                    "searches": [
                        {"term": search.term, "instructions": search.instructions}
                        for search in chunk["agent"]["strategy"].searches
                    ],
                }
                yield f"data: {json.dumps(strategy_data)}\n\n"

            elif "provide_answer" in chunk:
                for answer in chunk["provide_answer"]["answers"]:
                    answer_data = {"type": "answer", "content": answer}
                    yield f"data: {json.dumps(answer_data)}\n\n"

            elif "write_final_answer" in chunk:
                final_answer = chunk["write_final_answer"]["final_answer"]
                final_data = {"type": "final_answer", "content": final_answer}
                yield f"data: {json.dumps(final_data)}\n\n"

        # Send completion signal
        completion_data = {"type": "complete", "final_answer": final_answer}
        yield f"data: {json.dumps(completion_data)}\n\n"

    except Exception as e:
        logger.error(f"Error in ask streaming: {str(e)}")
        error_data = {"type": "error", "message": str(e)}
        yield f"data: {json.dumps(error_data)}\n\n"


@router.post("/search/ask")
async def ask_knowledge_base(ask_request: AskRequest):
    """Ask the knowledge base a question using AI models (SSE streaming)."""
    try:
        # Validate all three model roles before starting the stream.
        strategy_model = await _get_required_model(ask_request.strategy_model, "Strategy")
        answer_model = await _get_required_model(ask_request.answer_model, "Answer")
        final_answer_model = await _get_required_model(
            ask_request.final_answer_model, "Final answer"
        )
        await _require_embedding_model("Ask feature")

        return StreamingResponse(
            stream_ask_response(
                ask_request.question, strategy_model, answer_model, final_answer_model
            ),
            media_type="text/plain",
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error in ask endpoint: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Ask operation failed: {str(e)}")


@router.post("/search/ask/simple", response_model=AskResponse)
async def ask_knowledge_base_simple(ask_request: AskRequest):
    """Ask the knowledge base and return only the final answer (non-streaming)."""
    try:
        strategy_model = await _get_required_model(ask_request.strategy_model, "Strategy")
        answer_model = await _get_required_model(ask_request.answer_model, "Answer")
        final_answer_model = await _get_required_model(
            ask_request.final_answer_model, "Final answer"
        )
        await _require_embedding_model("Ask feature")

        # Drain the graph stream, keeping only the final-answer node's output.
        final_answer = None
        async for chunk in ask_graph.astream(
            input=dict(question=ask_request.question),  # type: ignore[arg-type]
            config=dict(
                configurable=dict(
                    strategy_model=strategy_model.id,
                    answer_model=answer_model.id,
                    final_answer_model=final_answer_model.id,
                )
            ),
            stream_mode="updates",
        ):
            if "write_final_answer" in chunk:
                final_answer = chunk["write_final_answer"]["final_answer"]

        if not final_answer:
            raise HTTPException(status_code=500, detail="No answer generated")

        return AskResponse(answer=final_answer, question=ask_request.question)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error in ask simple endpoint: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Ask operation failed: {str(e)}")


@router.post("/search/ask/direct", response_model=AskResponse)
async def ask_ai_direct(request: DirectAskRequest):
    """
    Ask AI directly without RAG/knowledge retrieval.
    This uses the LLM's general knowledge to answer questions.
    """
    try:
        # Local import — presumably to avoid a circular import at module load
        # time; TODO confirm.
        from open_notebook.graphs.utils import provision_langchain_model

        # Use the default chat model for direct questions
        model = await provision_langchain_model(
            content=request.question,
            model_id=request.model_id,
            default_type="chat",
            max_tokens=2048
        )

        # Create prompt for direct AI response
        prompt = f"""You are a helpful AI assistant. Answer the following question directly using your knowledge.
Be clear, concise, and informative.

Question: {request.question}

Answer:"""

        response = await model.ainvoke(prompt)
        # Chat models return a message object with .content; fall back to str()
        # for raw-text model responses.
        answer = response.content if hasattr(response, 'content') else str(response)

        return AskResponse(answer=answer, question=request.question)

    except Exception as e:
        logger.error(f"Error in direct AI endpoint: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Direct AI query failed: {str(e)}")
# api/routers/settings.py — read and update application-wide content settings.
from typing import Literal, cast

from fastapi import APIRouter, HTTPException
from loguru import logger

from api.models import SettingsResponse, SettingsUpdate
from open_notebook.domain.content_settings import ContentSettings
from open_notebook.exceptions import InvalidInputError

router = APIRouter()


def _to_response(settings: ContentSettings) -> SettingsResponse:
    """Map the ContentSettings singleton onto the API response model."""
    return SettingsResponse(
        default_content_processing_engine_doc=settings.default_content_processing_engine_doc,
        default_content_processing_engine_url=settings.default_content_processing_engine_url,
        default_embedding_option=settings.default_embedding_option,
        auto_delete_files=settings.auto_delete_files,
        youtube_preferred_languages=settings.youtube_preferred_languages,
    )


@router.get("/settings", response_model=SettingsResponse)
async def get_settings():
    """Get all application settings."""
    try:
        settings: ContentSettings = await ContentSettings.get_instance()  # type: ignore[assignment]
        return _to_response(settings)
    except Exception as e:
        logger.error(f"Error fetching settings: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error fetching settings: {str(e)}")


@router.put("/settings", response_model=SettingsResponse)
async def update_settings(settings_update: SettingsUpdate):
    """Update application settings; only fields present in the payload change.

    The casts narrow the free-form strings coming from the API model to the
    Literal types declared on ContentSettings. No runtime validation happens
    here — NOTE(review): confirm values are validated by the API model.
    """
    try:
        settings: ContentSettings = await ContentSettings.get_instance()  # type: ignore[assignment]

        if settings_update.default_content_processing_engine_doc is not None:
            settings.default_content_processing_engine_doc = cast(
                Literal["auto", "docling", "simple"],
                settings_update.default_content_processing_engine_doc,
            )
        if settings_update.default_content_processing_engine_url is not None:
            settings.default_content_processing_engine_url = cast(
                Literal["auto", "firecrawl", "jina", "simple"],
                settings_update.default_content_processing_engine_url,
            )
        if settings_update.default_embedding_option is not None:
            settings.default_embedding_option = cast(
                Literal["ask", "always", "never"],
                settings_update.default_embedding_option,
            )
        if settings_update.auto_delete_files is not None:
            settings.auto_delete_files = cast(
                Literal["yes", "no"],
                settings_update.auto_delete_files,
            )
        if settings_update.youtube_preferred_languages is not None:
            settings.youtube_preferred_languages = settings_update.youtube_preferred_languages

        await settings.update()

        return _to_response(settings)
    except HTTPException:
        raise
    except InvalidInputError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(f"Error updating settings: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error updating settings: {str(e)}")
# api/routers/source_chat.py — chat sessions scoped to a single source.
#
# Sessions are ChatSession records linked to a Source via a "refers_to"
# graph relation; message history lives in the LangGraph checkpoint keyed by
# thread_id == session_id.
import asyncio
import json
from typing import AsyncGenerator, List, Optional

from fastapi import APIRouter, HTTPException, Path
from fastapi.responses import StreamingResponse
from langchain_core.messages import HumanMessage
from langchain_core.runnables import RunnableConfig
from loguru import logger
from pydantic import BaseModel, Field

from open_notebook.database.repository import ensure_record_id, repo_query
from open_notebook.domain.notebook import ChatSession, Source
from open_notebook.exceptions import (
    NotFoundError,
)
from open_notebook.graphs.source_chat import source_chat_graph as source_chat_graph

router = APIRouter()


# Request/Response models
class CreateSourceChatSessionRequest(BaseModel):
    source_id: str = Field(..., description="Source ID to create chat session for")
    title: Optional[str] = Field(None, description="Optional session title")
    model_override: Optional[str] = Field(None, description="Optional model override for this session")

class UpdateSourceChatSessionRequest(BaseModel):
    title: Optional[str] = Field(None, description="New session title")
    model_override: Optional[str] = Field(None, description="Model override for this session")

class ChatMessage(BaseModel):
    id: str = Field(..., description="Message ID")
    type: str = Field(..., description="Message type (human|ai)")
    content: str = Field(..., description="Message content")
    timestamp: Optional[str] = Field(None, description="Message timestamp")

class ContextIndicator(BaseModel):
    sources: List[str] = Field(default_factory=list, description="Source IDs used in context")
    insights: List[str] = Field(default_factory=list, description="Insight IDs used in context")
    notes: List[str] = Field(default_factory=list, description="Note IDs used in context")

class SourceChatSessionResponse(BaseModel):
    id: str = Field(..., description="Session ID")
    title: str = Field(..., description="Session title")
    source_id: str = Field(..., description="Source ID")
    model_override: Optional[str] = Field(None, description="Model override for this session")
    created: str = Field(..., description="Creation timestamp")
    updated: str = Field(..., description="Last update timestamp")
    message_count: Optional[int] = Field(None, description="Number of messages in session")

class SourceChatSessionWithMessagesResponse(SourceChatSessionResponse):
    messages: List[ChatMessage] = Field(default_factory=list, description="Session messages")
    context_indicators: Optional[ContextIndicator] = Field(None, description="Context indicators from last response")

class SendMessageRequest(BaseModel):
    message: str = Field(..., description="User message content")
    model_override: Optional[str] = Field(None, description="Optional model override for this message")

class SuccessResponse(BaseModel):
    success: bool = Field(True, description="Operation success status")
    message: str = Field(..., description="Success message")


def _full_source_id(source_id: str) -> str:
    """Normalize a bare id to the 'source:' record-id form."""
    return source_id if source_id.startswith("source:") else f"source:{source_id}"


def _full_session_id(session_id: str) -> str:
    """Normalize a bare id to the 'chat_session:' record-id form."""
    return session_id if session_id.startswith("chat_session:") else f"chat_session:{session_id}"


async def _get_source_or_404(source_id: str) -> str:
    """Verify the source exists; return its full record id or raise 404."""
    full_source_id = _full_source_id(source_id)
    source = await Source.get(full_source_id)
    if not source:
        raise HTTPException(status_code=404, detail="Source not found")
    return full_source_id


async def _get_related_session_or_404(full_source_id: str, session_id: str):
    """Load a session and verify it is related to the source via 'refers_to'.

    Returns:
        (full_session_id, session) on success.

    Raises:
        HTTPException: 404 if the session is missing or not linked to the source.
    """
    full_session_id = _full_session_id(session_id)
    session = await ChatSession.get(full_session_id)
    if not session:
        raise HTTPException(status_code=404, detail="Session not found")

    relation_query = await repo_query(
        "SELECT * FROM refers_to WHERE in = $session_id AND out = $source_id",
        {"session_id": ensure_record_id(full_session_id), "source_id": ensure_record_id(full_source_id)}
    )
    if not relation_query:
        raise HTTPException(status_code=404, detail="Session not found for this source")

    return full_session_id, session


@router.post("/sources/{source_id}/chat/sessions", response_model=SourceChatSessionResponse)
async def create_source_chat_session(
    request: CreateSourceChatSessionRequest,
    source_id: str = Path(..., description="Source ID")
):
    """Create a new chat session for a source."""
    try:
        full_source_id = await _get_source_or_404(source_id)

        # Default title uses the running loop's monotonic clock purely as a
        # uniquish suffix. get_running_loop() replaces the deprecated
        # get_event_loop() call inside a coroutine.
        session = ChatSession(
            title=request.title or f"Source Chat {asyncio.get_running_loop().time():.0f}",
            model_override=request.model_override
        )
        await session.save()

        # Relate session to source using "refers_to" relation
        await session.relate("refers_to", full_source_id)

        return SourceChatSessionResponse(
            id=session.id or "",
            title=session.title or "Untitled Session",
            source_id=source_id,
            model_override=session.model_override,
            created=str(session.created),
            updated=str(session.updated),
            message_count=0
        )
    except HTTPException:
        # Bug fix: preserve the explicit 404s instead of re-reporting as 500.
        raise
    except NotFoundError:
        raise HTTPException(status_code=404, detail="Source not found")
    except Exception as e:
        logger.error(f"Error creating source chat session: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error creating source chat session: {str(e)}")


@router.get("/sources/{source_id}/chat/sessions", response_model=List[SourceChatSessionResponse])
async def get_source_chat_sessions(
    source_id: str = Path(..., description="Source ID")
):
    """Get all chat sessions for a source, newest first."""
    try:
        full_source_id = await _get_source_or_404(source_id)

        # Get sessions that refer to this source - first get relations, then sessions
        relations = await repo_query(
            "SELECT in FROM refers_to WHERE out = $source_id",
            {"source_id": ensure_record_id(full_source_id)}
        )

        sessions = []
        for relation in relations:
            session_id = relation.get("in")
            if session_id:
                # NOTE(review): session_id is interpolated into the query; it
                # comes from the database relation, not user input.
                session_result = await repo_query(f"SELECT * FROM {session_id}")
                if session_result and len(session_result) > 0:
                    session_data = session_result[0]
                    sessions.append(SourceChatSessionResponse(
                        id=session_data.get("id") or "",
                        title=session_data.get("title") or "Untitled Session",
                        source_id=source_id,
                        model_override=session_data.get("model_override"),
                        created=str(session_data.get("created")),
                        updated=str(session_data.get("updated")),
                        message_count=0  # TODO: Add message count if needed
                    ))

        # Sort sessions by created date (newest first)
        sessions.sort(key=lambda x: x.created, reverse=True)
        return sessions
    except HTTPException:
        raise
    except NotFoundError:
        raise HTTPException(status_code=404, detail="Source not found")
    except Exception as e:
        logger.error(f"Error fetching source chat sessions: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error fetching source chat sessions: {str(e)}")


@router.get("/sources/{source_id}/chat/sessions/{session_id}", response_model=SourceChatSessionWithMessagesResponse)
async def get_source_chat_session(
    source_id: str = Path(..., description="Source ID"),
    session_id: str = Path(..., description="Session ID")
):
    """Get a specific source chat session with its messages."""
    try:
        full_source_id = await _get_source_or_404(source_id)
        full_session_id, session = await _get_related_session_or_404(full_source_id, session_id)

        # Get session state from LangGraph to retrieve messages
        thread_state = source_chat_graph.get_state(
            config=RunnableConfig(configurable={"thread_id": session_id})
        )

        # Extract messages from state
        messages: list[ChatMessage] = []
        context_indicators = None

        if thread_state and thread_state.values:
            if "messages" in thread_state.values:
                for msg in thread_state.values["messages"]:
                    messages.append(ChatMessage(
                        id=getattr(msg, 'id', f"msg_{len(messages)}"),
                        type=msg.type if hasattr(msg, 'type') else 'unknown',
                        content=msg.content if hasattr(msg, 'content') else str(msg),
                        timestamp=None  # LangChain messages don't have timestamps by default
                    ))

            # Extract context indicators from the last state
            if "context_indicators" in thread_state.values:
                context_data = thread_state.values["context_indicators"]
                context_indicators = ContextIndicator(
                    sources=context_data.get("sources", []),
                    insights=context_data.get("insights", []),
                    notes=context_data.get("notes", [])
                )

        return SourceChatSessionWithMessagesResponse(
            id=session.id or "",
            title=session.title or "Untitled Session",
            source_id=source_id,
            model_override=getattr(session, 'model_override', None),
            created=str(session.created),
            updated=str(session.updated),
            message_count=len(messages),
            messages=messages,
            context_indicators=context_indicators
        )
    except HTTPException:
        raise
    except NotFoundError:
        raise HTTPException(status_code=404, detail="Source or session not found")
    except Exception as e:
        logger.error(f"Error fetching source chat session: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error fetching source chat session: {str(e)}")


@router.put("/sources/{source_id}/chat/sessions/{session_id}", response_model=SourceChatSessionResponse)
async def update_source_chat_session(
    request: UpdateSourceChatSessionRequest,
    source_id: str = Path(..., description="Source ID"),
    session_id: str = Path(..., description="Session ID")
):
    """Update source chat session title and/or model override.

    NOTE(review): because only non-None fields are applied, a client cannot
    clear model_override via this endpoint — confirm that is intended.
    """
    try:
        full_source_id = await _get_source_or_404(source_id)
        full_session_id, session = await _get_related_session_or_404(full_source_id, session_id)

        if request.title is not None:
            session.title = request.title
        if request.model_override is not None:
            session.model_override = request.model_override

        await session.save()

        return SourceChatSessionResponse(
            id=session.id or "",
            title=session.title or "Untitled Session",
            source_id=source_id,
            model_override=getattr(session, 'model_override', None),
            created=str(session.created),
            updated=str(session.updated),
            message_count=0
        )
    except HTTPException:
        raise
    except NotFoundError:
        raise HTTPException(status_code=404, detail="Source or session not found")
    except Exception as e:
        logger.error(f"Error updating source chat session: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error updating source chat session: {str(e)}")


@router.delete("/sources/{source_id}/chat/sessions/{session_id}", response_model=SuccessResponse)
async def delete_source_chat_session(
    source_id: str = Path(..., description="Source ID"),
    session_id: str = Path(..., description="Session ID")
):
    """Delete a source chat session."""
    try:
        full_source_id = await _get_source_or_404(source_id)
        full_session_id, session = await _get_related_session_or_404(full_source_id, session_id)

        await session.delete()

        return SuccessResponse(
            success=True,
            message="Source chat session deleted successfully"
        )
    except HTTPException:
        raise
    except NotFoundError:
        raise HTTPException(status_code=404, detail="Source or session not found")
    except Exception as e:
        logger.error(f"Error deleting source chat session: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error deleting source chat session: {str(e)}")


async def stream_source_chat_response(
    session_id: str,
    source_id: str,
    message: str,
    model_override: Optional[str] = None
) -> AsyncGenerator[str, None]:
    """Stream the source chat response as Server-Sent Events.

    Emits: a "user_message" echo, one "ai_message" per AI turn in the result,
    an optional "context_indicators" frame, and a terminal "complete" frame.
    Errors after streaming starts are reported in-band as an "error" frame.
    """
    try:
        # Get current checkpointed state for this thread
        current_state = source_chat_graph.get_state(
            config=RunnableConfig(configurable={"thread_id": session_id})
        )

        # Prepare state for execution
        state_values = current_state.values if current_state else {}
        state_values["messages"] = state_values.get("messages", [])
        state_values["source_id"] = source_id
        state_values["model_override"] = model_override

        # Add user message to state
        user_message = HumanMessage(content=message)
        state_values["messages"].append(user_message)

        # Send user message event
        user_event = {
            "type": "user_message",
            "content": message,
            "timestamp": None
        }
        yield f"data: {json.dumps(user_event)}\n\n"

        # Execute source chat graph synchronously (like notebook chat does).
        # NOTE(review): invoke() blocks the event loop for the duration of the
        # graph run — confirm whether an async variant should be used.
        result = source_chat_graph.invoke(
            input=state_values,  # type: ignore[arg-type]
            config=RunnableConfig(
                configurable={
                    "thread_id": session_id,
                    "model_id": model_override
                }
            )
        )

        # Stream the complete AI response
        if "messages" in result:
            for msg in result["messages"]:
                if hasattr(msg, 'type') and msg.type == 'ai':
                    ai_event = {
                        "type": "ai_message",
                        "content": msg.content if hasattr(msg, 'content') else str(msg),
                        "timestamp": None
                    }
                    yield f"data: {json.dumps(ai_event)}\n\n"

        # Stream context indicators
        if "context_indicators" in result:
            context_event = {
                "type": "context_indicators",
                "data": result["context_indicators"]
            }
            yield f"data: {json.dumps(context_event)}\n\n"

        # Send completion signal
        completion_event = {"type": "complete"}
        yield f"data: {json.dumps(completion_event)}\n\n"

    except Exception as e:
        logger.error(f"Error in source chat streaming: {str(e)}")
        error_event = {"type": "error", "message": str(e)}
        yield f"data: {json.dumps(error_event)}\n\n"


@router.post("/sources/{source_id}/chat/sessions/{session_id}/messages")
async def send_message_to_source_chat(
    request: SendMessageRequest,
    source_id: str = Path(..., description="Source ID"),
    session_id: str = Path(..., description="Session ID")
):
    """Send a message to source chat session with SSE streaming response."""
    try:
        full_source_id = await _get_source_or_404(source_id)
        full_session_id, session = await _get_related_session_or_404(full_source_id, session_id)

        if not request.message:
            raise HTTPException(status_code=400, detail="Message content is required")

        # Determine model override (request override takes precedence over session override)
        model_override = request.model_override or getattr(session, 'model_override', None)

        # Update session timestamp
        await session.save()

        # Return streaming response
        return StreamingResponse(
            stream_source_chat_response(
                session_id=session_id,
                source_id=full_source_id,
                message=request.message,
                model_override=model_override
            ),
            media_type="text/plain",
            headers={
                "Cache-Control": "no-cache",
                "Connection": "keep-alive",
                "Content-Type": "text/plain; charset=utf-8"
            }
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error sending message to source chat: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error sending message: {str(e)}")
# api/routers/sources.py — source CRUD and processing endpoints.
# (This chunk covers the import header, upload helpers, form parsing, and the
# paginated source listing; the create_source endpoint continues past this
# region and is left as in the original.)
import os
from pathlib import Path
from typing import Any, List, Optional

from fastapi import (
    APIRouter,
    Depends,
    File,
    Form,
    HTTPException,
    Query,
    UploadFile,
)
from fastapi.responses import FileResponse, Response
from loguru import logger
from surreal_commands import execute_command_sync

from api.command_service import CommandService
from api.models import (
    AssetModel,
    CreateSourceInsightRequest,
    SourceCreate,
    SourceInsightResponse,
    SourceListResponse,
    SourceResponse,
    SourceStatusResponse,
    SourceUpdate,
)
from commands.source_commands import SourceProcessingInput
from open_notebook.config import UPLOADS_FOLDER
from open_notebook.database.repository import ensure_record_id, repo_query
from open_notebook.domain.notebook import Notebook, Source
from open_notebook.domain.transformation import Transformation
from open_notebook.exceptions import InvalidInputError

router = APIRouter()


def generate_unique_filename(original_filename: str, upload_folder: str) -> str:
    """Return a collision-free path inside upload_folder for original_filename.

    Creates the folder if needed. If "name.ext" exists, tries "name (1).ext",
    "name (2).ext", ... until a free name is found (mirrors the Streamlit app).

    Args:
        original_filename: The client-supplied file name.
        upload_folder: Directory where uploads are stored.

    Returns:
        Absolute/relative path string of a non-existing file inside the folder.
    """
    file_path = Path(upload_folder)
    file_path.mkdir(parents=True, exist_ok=True)

    # Split filename and extension
    stem = Path(original_filename).stem
    suffix = Path(original_filename).suffix

    # Check if file exists and generate unique name
    counter = 0
    while True:
        if counter == 0:
            new_filename = original_filename
        else:
            new_filename = f"{stem} ({counter}){suffix}"

        full_path = file_path / new_filename
        if not full_path.exists():
            return str(full_path)
        counter += 1


async def save_uploaded_file(upload_file: UploadFile) -> str:
    """Save an uploaded file into UPLOADS_FOLDER and return its path.

    Reads the whole upload into memory before writing — fine for typical
    documents; NOTE(review): consider chunked copying for very large files.

    Raises:
        ValueError: if the upload has no filename.
        Exception: re-raises any write error after removing the partial file.
    """
    if not upload_file.filename:
        raise ValueError("No filename provided")

    # Generate unique filename
    file_path = generate_unique_filename(upload_file.filename, UPLOADS_FOLDER)

    try:
        # Save file
        with open(file_path, "wb") as f:
            content = await upload_file.read()
            f.write(content)

        logger.info(f"Saved uploaded file to: {file_path}")
        return file_path
    except Exception as e:
        logger.error(f"Failed to save uploaded file: {e}")
        # Clean up partial file if it exists
        if os.path.exists(file_path):
            os.unlink(file_path)
        raise


def parse_source_form_data(
    type: str = Form(...),
    notebook_id: Optional[str] = Form(None),
    notebooks: Optional[str] = Form(None),  # JSON string of notebook IDs
    url: Optional[str] = Form(None),
    content: Optional[str] = Form(None),
    title: Optional[str] = Form(None),
    transformations: Optional[str] = Form(None),  # JSON string of transformation IDs
    embed: str = Form("false"),  # Accept as string, convert to bool
    delete_source: str = Form("false"),  # Accept as string, convert to bool
    async_processing: str = Form("false"),  # Accept as string, convert to bool
    file: Optional[UploadFile] = File(None),
) -> tuple[SourceCreate, Optional[UploadFile]]:
    """Parse multipart form data into (SourceCreate, optional upload file).

    Multipart forms deliver every scalar as a string, so booleans arrive as
    "true"/"false" and list fields as JSON-encoded strings; this dependency
    normalizes all of them before model construction.

    Raises:
        ValueError: on malformed JSON in `notebooks` or `transformations`
            (FastAPI reports it as a request validation failure).
    """
    import json

    # Convert string booleans to actual booleans
    def str_to_bool(value: str) -> bool:
        return value.lower() in ("true", "1", "yes", "on")

    embed_bool = str_to_bool(embed)
    delete_source_bool = str_to_bool(delete_source)
    async_processing_bool = str_to_bool(async_processing)

    # Parse JSON strings
    notebooks_list = None
    if notebooks:
        try:
            notebooks_list = json.loads(notebooks)
        except json.JSONDecodeError:
            logger.error(f"DEBUG - Invalid JSON in notebooks field: {notebooks}")
            raise ValueError("Invalid JSON in notebooks field")

    transformations_list = []
    if transformations:
        try:
            transformations_list = json.loads(transformations)
        except json.JSONDecodeError:
            logger.error(
                f"DEBUG - Invalid JSON in transformations field: {transformations}"
            )
            raise ValueError("Invalid JSON in transformations field")

    # Create SourceCreate instance
    try:
        source_data = SourceCreate(
            type=type,
            notebook_id=notebook_id,
            notebooks=notebooks_list,
            url=url,
            content=content,
            title=title,
            file_path=None,  # Will be set later if file is uploaded
            transformations=transformations_list,
            embed=embed_bool,
            delete_source=delete_source_bool,
            async_processing=async_processing_bool,
        )
    except Exception as e:
        logger.error(f"Failed to create SourceCreate instance: {e}")
        raise

    return source_data, file


@router.get("/sources", response_model=List[SourceListResponse])
async def get_sources(
    notebook_id: Optional[str] = Query(None, description="Filter by notebook ID"),
    limit: int = Query(50, ge=1, le=100, description="Number of sources to return (1-100)"),
    offset: int = Query(0, ge=0, description="Number of sources to skip"),
    sort_by: str = Query("updated", description="Field to sort by (created or updated)"),
    sort_order: str = Query("desc", description="Sort order (asc or desc)"),
):
    """Get sources with pagination and sorting support.

    When notebook_id is given, only sources referenced by that notebook are
    returned. Each row is enriched with insight count, embedding flag, and —
    when the source has a processing command attached — its status, fetched
    concurrently with a bounded semaphore.
    """
    try:
        # Validate sort parameters before interpolating them into the query —
        # this whitelist is what makes the f-string ORDER BY safe.
        if sort_by not in ["created", "updated"]:
            raise HTTPException(status_code=400, detail="sort_by must be 'created' or 'updated'")
        if sort_order.lower() not in ["asc", "desc"]:
            raise HTTPException(status_code=400, detail="sort_order must be 'asc' or 'desc'")

        # Build ORDER BY clause
        order_clause = f"ORDER BY {sort_by} {sort_order.upper()}"

        # Build the query
        if notebook_id:
            # Verify notebook exists first
            notebook = await Notebook.get(notebook_id)
            if not notebook:
                raise HTTPException(status_code=404, detail="Notebook not found")

            # Query sources for specific notebook - include command field
            query = f"""
                SELECT id, asset, created, title, updated, topics, command,
                (SELECT VALUE count() FROM source_insight WHERE source = $parent.id GROUP ALL)[0].count OR 0 AS insights_count,
                ((SELECT VALUE id FROM source_embedding WHERE source = $parent.id LIMIT 1)) != NONE AS embedded
                FROM (select value in from reference where out=$notebook_id)
                {order_clause}
                LIMIT $limit START $offset
            """
            result = await repo_query(
                query, {
                    "notebook_id": ensure_record_id(notebook_id),
                    "limit": limit,
                    "offset": offset
                }
            )
        else:
            # Query all sources - include command field
            query = f"""
                SELECT id, asset, created, title, updated, topics, command,
                (SELECT VALUE count() FROM source_insight WHERE source = $parent.id GROUP ALL)[0].count OR 0 AS insights_count,
                ((SELECT VALUE id FROM source_embedding WHERE source = $parent.id LIMIT 1)) != NONE AS embedded
                FROM source
                {order_clause}
                LIMIT $limit START $offset
            """
            result = await repo_query(query, {"limit": limit, "offset": offset})

        # Extract command IDs for batch status fetching
        command_ids = []
        command_to_source = {}

        for row in result:
            command = row.get("command")
            if command:
                command_str = str(command)
                command_ids.append(command_str)
                command_to_source[command_str] = row["id"]

        # Batch fetch command statuses
        command_statuses = {}
        if command_ids:
            try:
                # Fetch statuses individually but concurrently; the semaphore
                # caps in-flight requests so we don't overwhelm the command
                # service.
                import asyncio

                from surreal_commands import get_command_status

                async def get_status_safe(command_id: str):
                    # Per-command failures degrade to "unknown" instead of
                    # failing the whole listing.
                    try:
                        status = await get_command_status(command_id)
                        return (command_id, status)
                    except Exception as e:
                        logger.warning(
                            f"Failed to get status for command {command_id}: {e}"
                        )
                        return (command_id, None)

                semaphore = asyncio.Semaphore(10)

                async def get_status_with_limit(command_id: str):
                    async with semaphore:
                        return await get_status_safe(command_id)

                # Fetch statuses concurrently but with limit
                status_tasks = [get_status_with_limit(cmd_id) for cmd_id in command_ids]
                status_results = await asyncio.gather(
                    *status_tasks, return_exceptions=True
                )

                # Process results
                for result_item in status_results:
                    if isinstance(result_item, Exception):
                        continue
                    if isinstance(result_item, tuple) and len(result_item) == 2:
                        cmd_id, status = result_item
                        command_statuses[cmd_id] = status

            except Exception as e:
                logger.warning(f"Failed to batch fetch command statuses: {e}")

        # Convert result to response model
        response_list = []
        for row in result:
            command = row.get("command")
            command_id = str(command) if command else None
            status = None
            processing_info = None

            # Get status information if command exists
            if command_id and command_id in command_statuses:
                status_obj = command_statuses[command_id]
                if status_obj:
                    status = status_obj.status
                    # Extract execution metadata from nested result structure
                    result_data: dict[str, Any] | None = getattr(status_obj, "result", None)
                    execution_metadata: dict[str, Any] = result_data.get("execution_metadata", {}) if isinstance(result_data, dict) else {}
                    processing_info = {
                        "started_at": execution_metadata.get("started_at"),
                        "completed_at": execution_metadata.get("completed_at"),
                        "error": getattr(status_obj, "error_message", None),
                    }
            elif command_id:
                # Command exists but status couldn't be fetched
                status = "unknown"

            response_list.append(
                SourceListResponse(
                    id=row["id"],
                    title=row.get("title"),
                    topics=row.get("topics") or [],
                    asset=AssetModel(
                        file_path=row["asset"].get("file_path")
                        if row.get("asset")
                        else None,
                        url=row["asset"].get("url") if row.get("asset") else None,
                    )
                    if row.get("asset")
                    else None,
                    embedded=row.get("embedded", False),
                    embedded_chunks=0,  # Removed from query - not needed in list view
                    insights_count=row.get("insights_count", 0),
                    created=str(row["created"]),
                    updated=str(row["updated"]),
                    # Status fields
                    command_id=command_id,
                    status=status,
                    processing_info=processing_info,
                )
            )

        return response_list
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error fetching sources: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error fetching sources: {str(e)}")
+ status_code=404, detail=f"Notebook {notebook_id} not found" + ) + + # Handle file upload if provided + file_path = None + if upload_file and source_data.type == "upload": + try: + file_path = await save_uploaded_file(upload_file) + except Exception as e: + logger.error(f"File upload failed: {e}") + raise HTTPException( + status_code=400, detail=f"File upload failed: {str(e)}" + ) + + # Prepare content_state for processing + content_state: dict[str, Any] = {} + + if source_data.type == "link": + if not source_data.url: + raise HTTPException( + status_code=400, detail="URL is required for link type" + ) + content_state["url"] = source_data.url + elif source_data.type == "upload": + # Use uploaded file path or provided file_path (backward compatibility) + final_file_path = file_path or source_data.file_path + if not final_file_path: + raise HTTPException( + status_code=400, + detail="File upload or file_path is required for upload type", + ) + content_state["file_path"] = final_file_path + content_state["delete_source"] = source_data.delete_source + elif source_data.type == "text": + if not source_data.content: + raise HTTPException( + status_code=400, detail="Content is required for text type" + ) + content_state["content"] = source_data.content + else: + raise HTTPException( + status_code=400, + detail="Invalid source type. 
Must be link, upload, or text", + ) + + # Validate transformations exist + transformation_ids = source_data.transformations or [] + for trans_id in transformation_ids: + transformation = await Transformation.get(trans_id) + if not transformation: + raise HTTPException( + status_code=404, detail=f"Transformation {trans_id} not found" + ) + + # Branch based on processing mode + if source_data.async_processing: + # ASYNC PATH: Create source record first, then queue command + logger.info("Using async processing path") + + # Create minimal source record - let SurrealDB generate the ID + source = Source( + title=source_data.title or "Processing...", + topics=[], + ) + await source.save() + + # Add source to notebooks immediately so it appears in the UI + # The source_graph will skip adding duplicates + for notebook_id in (source_data.notebooks or []): + await source.add_to_notebook(notebook_id) + + try: + # Import command modules to ensure they're registered + import commands.source_commands # noqa: F401 + + # Submit command for background processing + command_input = SourceProcessingInput( + source_id=str(source.id), + content_state=content_state, + notebook_ids=source_data.notebooks, + transformations=transformation_ids, + embed=source_data.embed, + ) + + command_id = await CommandService.submit_command_job( + "open_notebook", # app name + "process_source", # command name + command_input.model_dump(), + ) + + logger.info(f"Submitted async processing command: {command_id}") + + # Update source with command reference immediately + # command_id already includes 'command:' prefix + source.command = ensure_record_id(command_id) + await source.save() + + # Return source with command info + return SourceResponse( + id=source.id or "", + title=source.title, + topics=source.topics or [], + asset=None, # Will be populated after processing + full_text=None, # Will be populated after processing + embedded=False, # Will be updated after processing + embedded_chunks=0, + 
created=str(source.created), + updated=str(source.updated), + command_id=command_id, + status="new", + processing_info={"async": True, "queued": True}, + ) + + except Exception as e: + logger.error(f"Failed to submit async processing command: {e}") + # Clean up source record on command submission failure + try: + await source.delete() + except Exception: + pass + # Clean up uploaded file if we created it + if file_path and upload_file: + try: + os.unlink(file_path) + except Exception: + pass + raise HTTPException( + status_code=500, detail=f"Failed to queue processing: {str(e)}" + ) + + else: + # SYNC PATH: Execute synchronously using execute_command_sync + logger.info("Using sync processing path") + + try: + # Import command modules to ensure they're registered + import commands.source_commands # noqa: F401 + + # Create source record - let SurrealDB generate the ID + source = Source( + title=source_data.title or "Processing...", + topics=[], + ) + await source.save() + + # Add source to notebooks immediately so it appears in the UI + # The source_graph will skip adding duplicates + for notebook_id in (source_data.notebooks or []): + await source.add_to_notebook(notebook_id) + + # Execute command synchronously + command_input = SourceProcessingInput( + source_id=str(source.id), + content_state=content_state, + notebook_ids=source_data.notebooks, + transformations=transformation_ids, + embed=source_data.embed, + ) + + result = execute_command_sync( + "open_notebook", # app name + "process_source", # command name + command_input.model_dump(), + timeout=300, # 5 minute timeout for sync processing + ) + + if not result.is_success(): + logger.error(f"Sync processing failed: {result.error_message}") + # Clean up source record + try: + await source.delete() + except Exception: + pass + # Clean up uploaded file if we created it + if file_path and upload_file: + try: + os.unlink(file_path) + except Exception: + pass + raise HTTPException( + status_code=500, + 
detail=f"Processing failed: {result.error_message}", + ) + + # Get the processed source + if not source.id: + raise HTTPException( + status_code=500, detail="Source ID is missing" + ) + processed_source = await Source.get(source.id) + if not processed_source: + raise HTTPException( + status_code=500, detail="Processed source not found" + ) + + embedded_chunks = await processed_source.get_embedded_chunks() + return SourceResponse( + id=processed_source.id or "", + title=processed_source.title, + topics=processed_source.topics or [], + asset=AssetModel( + file_path=processed_source.asset.file_path + if processed_source.asset + else None, + url=processed_source.asset.url + if processed_source.asset + else None, + ) + if processed_source.asset + else None, + full_text=processed_source.full_text, + embedded=embedded_chunks > 0, + embedded_chunks=embedded_chunks, + created=str(processed_source.created), + updated=str(processed_source.updated), + # No command_id or status for sync processing (legacy behavior) + ) + + except Exception as e: + logger.error(f"Sync processing failed: {e}") + # Clean up uploaded file if we created it + if file_path and upload_file: + try: + os.unlink(file_path) + except Exception: + pass + raise + + except HTTPException: + # Clean up uploaded file on HTTP exceptions if we created it + if file_path and upload_file: + try: + os.unlink(file_path) + except Exception: + pass + raise + except InvalidInputError as e: + # Clean up uploaded file on validation errors if we created it + if file_path and upload_file: + try: + os.unlink(file_path) + except Exception: + pass + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error creating source: {str(e)}") + # Clean up uploaded file on unexpected errors if we created it + if file_path and upload_file: + try: + os.unlink(file_path) + except Exception: + pass + raise HTTPException(status_code=500, detail=f"Error creating source: {str(e)}") + + 
@router.post("/sources/json", response_model=SourceResponse)
async def create_source_json(source_data: SourceCreate):
    """Create a new source using JSON payload (legacy endpoint for backward compatibility)."""
    # Convert to the (source_data, upload_file) tuple the main endpoint expects.
    form_data = (source_data, None)
    return await create_source(form_data)


def _is_path_inside(root: str, candidate: str) -> bool:
    """True when *candidate* equals *root* or is contained inside it.

    Uses os.path.commonpath rather than str.startswith: a bare prefix check
    lets a sibling directory that shares a name prefix (e.g. "/uploads-evil"
    vs "/uploads") slip through, which defeats the traversal guard.
    """
    try:
        return os.path.commonpath([root, candidate]) == root
    except ValueError:
        # Different drives (Windows) or mixed absolute/relative paths.
        return False


async def _resolve_source_file(source_id: str) -> tuple[str, str]:
    """Resolve a source's stored file path to (real_path, filename).

    Raises HTTPException 404 when the source/file is missing and 403 when the
    path escapes the uploads directory.
    """
    source = await Source.get(source_id)
    if not source:
        raise HTTPException(status_code=404, detail="Source not found")

    file_path = source.asset.file_path if source.asset else None
    if not file_path:
        raise HTTPException(status_code=404, detail="Source has no file to download")

    safe_root = os.path.realpath(UPLOADS_FOLDER)
    resolved_path = os.path.realpath(file_path)

    if not _is_path_inside(safe_root, resolved_path):
        logger.warning(
            f"Blocked download outside uploads directory for source {source_id}: {resolved_path}"
        )
        raise HTTPException(status_code=403, detail="Access to file denied")

    if not os.path.exists(resolved_path):
        raise HTTPException(status_code=404, detail="File not found on server")

    filename = os.path.basename(resolved_path)
    return resolved_path, filename


def _is_source_file_available(source: Source) -> Optional[bool]:
    """Return whether a source's file exists on disk.

    None = source has no file at all; False = outside uploads root or
    missing; True = present and inside the uploads root.
    """
    if not source or not source.asset or not source.asset.file_path:
        return None

    safe_root = os.path.realpath(UPLOADS_FOLDER)
    resolved_path = os.path.realpath(source.asset.file_path)

    if not _is_path_inside(safe_root, resolved_path):
        return False

    return os.path.exists(resolved_path)


@router.get("/sources/{source_id}", response_model=SourceResponse)
async def get_source(source_id: str):
    """Get a specific source by ID, including processing status and notebook links."""
    try:
        source = await Source.get(source_id)
        if not source:
            raise HTTPException(status_code=404, detail="Source not found")

        # Status information is best-effort: a failed lookup degrades to "unknown".
        status = None
        processing_info = None
        if source.command:
            try:
                status = await source.get_status()
                processing_info = await source.get_processing_progress()
            except Exception as e:
                logger.warning(f"Failed to get status for source {source_id}: {e}")
                status = "unknown"

        embedded_chunks = await source.get_embedded_chunks()

        # Notebooks referencing this source (reference edge: in=source, out=notebook).
        notebooks_query = await repo_query(
            "SELECT VALUE out FROM reference WHERE in = $source_id",
            {"source_id": ensure_record_id(source.id or source_id)}
        )
        notebook_ids = [str(nb_id) for nb_id in notebooks_query] if notebooks_query else []

        return SourceResponse(
            id=source.id or "",
            title=source.title,
            topics=source.topics or [],
            asset=AssetModel(
                file_path=source.asset.file_path if source.asset else None,
                url=source.asset.url if source.asset else None,
            )
            if source.asset
            else None,
            full_text=source.full_text,
            embedded=embedded_chunks > 0,
            embedded_chunks=embedded_chunks,
            file_available=_is_source_file_available(source),
            created=str(source.created),
            updated=str(source.updated),
            # Status fields
            command_id=str(source.command) if source.command else None,
            status=status,
            processing_info=processing_info,
            # Notebook associations
            notebooks=notebook_ids,
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error fetching source {source_id}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error fetching source: {str(e)}")


@router.head("/sources/{source_id}/download")
async def check_source_file(source_id: str):
    """Check if a source has a downloadable file."""
    try:
        await _resolve_source_file(source_id)
        return Response(status_code=200)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error checking file for source {source_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Failed to verify file")


@router.get("/sources/{source_id}/download")
async def download_source_file(source_id: str):
    """Download the original file associated with an uploaded source."""
    try:
        resolved_path, filename = await _resolve_source_file(source_id)
        return FileResponse(
            path=resolved_path,
            filename=filename,
            media_type="application/octet-stream",
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error downloading file for source {source_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Failed to download source file")


@router.get("/sources/{source_id}/status", response_model=SourceStatusResponse)
async def get_source_status(source_id: str):
    """Get processing status for a source."""
    try:
        source = await Source.get(source_id)
        if not source:
            raise HTTPException(status_code=404, detail="Source not found")

        # Legacy sources were created before async processing and carry no command.
        if not source.command:
            return SourceStatusResponse(
                status=None,
                message="Legacy source (completed before async processing)",
                processing_info=None,
                command_id=None,
            )

        try:
            status = await source.get_status()
            processing_info = await source.get_processing_progress()

            # Human-readable message for the known states; fall through to a
            # generic message for anything else.
            messages = {
                "completed": "Source processing completed successfully",
                "failed": "Source processing failed",
                "running": "Source processing in progress",
                "queued": "Source processing queued",
                "unknown": "Source processing status unknown",
            }
            message = messages.get(status, f"Source processing status: {status}")

            return SourceStatusResponse(
                status=status,
                message=message,
                processing_info=processing_info,
                command_id=str(source.command) if source.command else None,
            )

        except Exception as e:
            logger.warning(f"Failed to get status for source {source_id}: {e}")
            return SourceStatusResponse(
                status="unknown",
                message="Failed to retrieve processing status",
                processing_info=None,
                command_id=str(source.command) if source.command else None,
            )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error fetching status for source {source_id}: {str(e)}")
        raise HTTPException(
            status_code=500, detail=f"Error fetching source status: {str(e)}"
        )


@router.put("/sources/{source_id}", response_model=SourceResponse)
async def update_source(source_id: str, source_update: SourceUpdate):
    """Update a source. Only provided fields (title, topics) are changed."""
    try:
        source = await Source.get(source_id)
        if not source:
            raise HTTPException(status_code=404, detail="Source not found")

        if source_update.title is not None:
            source.title = source_update.title
        if source_update.topics is not None:
            source.topics = source_update.topics

        await source.save()

        embedded_chunks = await source.get_embedded_chunks()
        return SourceResponse(
            id=source.id or "",
            title=source.title,
            topics=source.topics or [],
            asset=AssetModel(
                file_path=source.asset.file_path if source.asset else None,
                url=source.asset.url if source.asset else None,
            )
            if source.asset
            else None,
            full_text=source.full_text,
            embedded=embedded_chunks > 0,
            embedded_chunks=embedded_chunks,
            created=str(source.created),
            updated=str(source.updated),
        )
    except HTTPException:
        raise
    except InvalidInputError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(f"Error updating source {source_id}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error updating source: {str(e)}")


@router.post("/sources/{source_id}/retry", response_model=SourceResponse)
async def retry_source_processing(source_id: str):
    """Retry processing for a failed or stuck source."""
    try:
        source = await Source.get(source_id)
        if not source:
            raise HTTPException(status_code=404, detail="Source not found")

        # Refuse to retry while a command is still active; if the status
        # lookup itself fails, proceed with the retry anyway.
        if source.command:
            try:
                status = await source.get_status()
                if status in ["running", "queued"]:
                    raise HTTPException(
                        status_code=400,
                        detail="Source is already processing. Cannot retry while processing is active.",
                    )
            except Exception as e:
                logger.warning(
                    f"Failed to check current status for source {source_id}: {e}"
                )
                # Continue with retry if we can't check status

        # FIX: the reference edge stores source in `in` and notebook in `out`
        # (see the queries used by get_source/get_sources); the previous
        # `WHERE source = $source_id` never matched, so retry always failed
        # with "not associated with any notebooks".
        references = await repo_query(
            "SELECT VALUE out FROM reference WHERE in = $source_id",
            {"source_id": ensure_record_id(source.id or source_id)},
        )
        notebook_ids = [str(nb_id) for nb_id in references] if references else []

        if not notebook_ids:
            raise HTTPException(
                status_code=400, detail="Source is not associated with any notebooks"
            )

        # Rebuild content_state from the stored asset (file or URL), falling
        # back to the stored full text for text sources.
        content_state = {}
        if source.asset:
            if source.asset.file_path:
                content_state = {
                    "file_path": source.asset.file_path,
                    "delete_source": False,  # Don't delete on retry
                }
            elif source.asset.url:
                content_state = {"url": source.asset.url}
            else:
                raise HTTPException(
                    status_code=400, detail="Source asset has no file_path or url"
                )
        else:
            if source.full_text:
                content_state = {"content": source.full_text}
            else:
                raise HTTPException(
                    status_code=400, detail="Cannot determine source content for retry"
                )

        try:
            # Import command modules to ensure they're registered.
            import commands.source_commands  # noqa: F401

            command_input = SourceProcessingInput(
                source_id=str(source.id),
                content_state=content_state,
                notebook_ids=notebook_ids,
                transformations=[],  # Use default transformations on retry
                embed=True,  # Always embed on retry
            )

            command_id = await CommandService.submit_command_job(
                "open_notebook",  # app name
                "process_source",  # command name
                command_input.model_dump(),
            )

            logger.info(
                f"Submitted retry processing command: {command_id} for source {source_id}"
            )

            # FIX: submit_command_job already returns a 'command:'-prefixed id
            # (see create_source); prepending the prefix again produced a
            # malformed 'command:command:...' record id.
            source.command = ensure_record_id(command_id)
            await source.save()

            embedded_chunks = await source.get_embedded_chunks()

            return SourceResponse(
                id=source.id or "",
                title=source.title,
                topics=source.topics or [],
                asset=AssetModel(
                    file_path=source.asset.file_path if source.asset else None,
                    url=source.asset.url if source.asset else None,
                )
                if source.asset
                else None,
                full_text=source.full_text,
                embedded=embedded_chunks > 0,
                embedded_chunks=embedded_chunks,
                created=str(source.created),
                updated=str(source.updated),
                command_id=command_id,
                status="queued",
                processing_info={"retry": True, "queued": True},
            )

        except Exception as e:
            logger.error(
                f"Failed to submit retry processing command for source {source_id}: {e}"
            )
            raise HTTPException(
                status_code=500, detail=f"Failed to queue retry processing: {str(e)}"
            )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error retrying source processing for {source_id}: {str(e)}")
        raise HTTPException(
            status_code=500, detail=f"Error retrying source processing: {str(e)}"
        )


@router.delete("/sources/{source_id}")
async def delete_source(source_id: str):
    """Delete a source."""
    try:
        source = await Source.get(source_id)
        if not source:
            raise HTTPException(status_code=404, detail="Source not found")

        await source.delete()

        return {"message": "Source deleted successfully"}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error deleting source {source_id}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error deleting source: {str(e)}")


@router.get("/sources/{source_id}/insights", response_model=List[SourceInsightResponse])
async def get_source_insights(source_id: str):
    """Get all insights for a specific source."""
    try:
        source = await Source.get(source_id)
        if not source:
            raise HTTPException(status_code=404, detail="Source not found")

        insights = await source.get_insights()
        return [
            SourceInsightResponse(
                id=insight.id or "",
                source_id=source_id,
                insight_type=insight.insight_type,
                content=insight.content,
                created=str(insight.created),
                updated=str(insight.updated),
            )
            for insight in insights
        ]
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error fetching insights for source {source_id}: {str(e)}")
        raise HTTPException(
            status_code=500, detail=f"Error fetching insights: {str(e)}"
        )


@router.post("/sources/{source_id}/insights", response_model=SourceInsightResponse)
async def create_source_insight(source_id: str, request: CreateSourceInsightRequest):
    """Create a new insight for a source by running a transformation."""
    try:
        source = await Source.get(source_id)
        if not source:
            raise HTTPException(status_code=404, detail="Source not found")

        transformation = await Transformation.get(request.transformation_id)
        if not transformation:
            raise HTTPException(status_code=404, detail="Transformation not found")

        # Run the transformation graph; it persists the insight as a side effect.
        from open_notebook.graphs.transformation import graph as transform_graph

        await transform_graph.ainvoke(
            input=dict(source=source, transformation=transformation)  # type: ignore[arg-type]
        )

        # The newly created insight is assumed to be the last one returned.
        insights = await source.get_insights()
        if insights:
            newest = insights[-1]
            return SourceInsightResponse(
                id=newest.id or "",
                source_id=source_id,
                insight_type=newest.insight_type,
                content=newest.content,
                created=str(newest.created),
                updated=str(newest.updated),
            )
        else:
            raise HTTPException(status_code=500, detail="Failed to create insight")

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error creating insight for source {source_id}: {str(e)}")
raise HTTPException(status_code=500, detail=f"Error creating insight: {str(e)}") diff --git a/api/routers/speaker_profiles.py b/api/routers/speaker_profiles.py new file mode 100644 index 0000000000000000000000000000000000000000..3e3366d3e45b8a6d2df9ec8eebd5e2024acdf8e7 --- /dev/null +++ b/api/routers/speaker_profiles.py @@ -0,0 +1,222 @@ +from typing import Any, Dict, List + +from fastapi import APIRouter, HTTPException +from loguru import logger +from pydantic import BaseModel, Field + +from open_notebook.domain.podcast import SpeakerProfile + +router = APIRouter() + + +class SpeakerProfileResponse(BaseModel): + id: str + name: str + description: str + tts_provider: str + tts_model: str + speakers: List[Dict[str, Any]] + + +@router.get("/speaker-profiles", response_model=List[SpeakerProfileResponse]) +async def list_speaker_profiles(): + """List all available speaker profiles""" + try: + profiles = await SpeakerProfile.get_all(order_by="name asc") + + return [ + SpeakerProfileResponse( + id=str(profile.id), + name=profile.name, + description=profile.description or "", + tts_provider=profile.tts_provider, + tts_model=profile.tts_model, + speakers=profile.speakers + ) + for profile in profiles + ] + + except Exception as e: + logger.error(f"Failed to fetch speaker profiles: {e}") + raise HTTPException( + status_code=500, + detail=f"Failed to fetch speaker profiles: {str(e)}" + ) + + +@router.get("/speaker-profiles/{profile_name}", response_model=SpeakerProfileResponse) +async def get_speaker_profile(profile_name: str): + """Get a specific speaker profile by name""" + try: + profile = await SpeakerProfile.get_by_name(profile_name) + + if not profile: + raise HTTPException( + status_code=404, + detail=f"Speaker profile '{profile_name}' not found" + ) + + return SpeakerProfileResponse( + id=str(profile.id), + name=profile.name, + description=profile.description or "", + tts_provider=profile.tts_provider, + tts_model=profile.tts_model, + speakers=profile.speakers + ) + 
+ except HTTPException: + raise + except Exception as e: + logger.error(f"Failed to fetch speaker profile '{profile_name}': {e}") + raise HTTPException( + status_code=500, + detail=f"Failed to fetch speaker profile: {str(e)}" + ) + + +class SpeakerProfileCreate(BaseModel): + name: str = Field(..., description="Unique profile name") + description: str = Field("", description="Profile description") + tts_provider: str = Field(..., description="TTS provider") + tts_model: str = Field(..., description="TTS model name") + speakers: List[Dict[str, Any]] = Field(..., description="Array of speaker configurations") + + +@router.post("/speaker-profiles", response_model=SpeakerProfileResponse) +async def create_speaker_profile(profile_data: SpeakerProfileCreate): + """Create a new speaker profile""" + try: + profile = SpeakerProfile( + name=profile_data.name, + description=profile_data.description, + tts_provider=profile_data.tts_provider, + tts_model=profile_data.tts_model, + speakers=profile_data.speakers + ) + + await profile.save() + + return SpeakerProfileResponse( + id=str(profile.id), + name=profile.name, + description=profile.description or "", + tts_provider=profile.tts_provider, + tts_model=profile.tts_model, + speakers=profile.speakers + ) + + except Exception as e: + logger.error(f"Failed to create speaker profile: {e}") + raise HTTPException( + status_code=500, + detail=f"Failed to create speaker profile: {str(e)}" + ) + + +@router.put("/speaker-profiles/{profile_id}", response_model=SpeakerProfileResponse) +async def update_speaker_profile(profile_id: str, profile_data: SpeakerProfileCreate): + """Update an existing speaker profile""" + try: + profile = await SpeakerProfile.get(profile_id) + + if not profile: + raise HTTPException( + status_code=404, + detail=f"Speaker profile '{profile_id}' not found" + ) + + # Update fields + profile.name = profile_data.name + profile.description = profile_data.description + profile.tts_provider = profile_data.tts_provider + 
profile.tts_model = profile_data.tts_model + profile.speakers = profile_data.speakers + + await profile.save() + + return SpeakerProfileResponse( + id=str(profile.id), + name=profile.name, + description=profile.description or "", + tts_provider=profile.tts_provider, + tts_model=profile.tts_model, + speakers=profile.speakers + ) + + except HTTPException: + raise + except Exception as e: + logger.error(f"Failed to update speaker profile: {e}") + raise HTTPException( + status_code=500, + detail=f"Failed to update speaker profile: {str(e)}" + ) + + +@router.delete("/speaker-profiles/{profile_id}") +async def delete_speaker_profile(profile_id: str): + """Delete a speaker profile""" + try: + profile = await SpeakerProfile.get(profile_id) + + if not profile: + raise HTTPException( + status_code=404, + detail=f"Speaker profile '{profile_id}' not found" + ) + + await profile.delete() + + return {"message": "Speaker profile deleted successfully"} + + except HTTPException: + raise + except Exception as e: + logger.error(f"Failed to delete speaker profile: {e}") + raise HTTPException( + status_code=500, + detail=f"Failed to delete speaker profile: {str(e)}" + ) + + +@router.post("/speaker-profiles/{profile_id}/duplicate", response_model=SpeakerProfileResponse) +async def duplicate_speaker_profile(profile_id: str): + """Duplicate a speaker profile""" + try: + original = await SpeakerProfile.get(profile_id) + + if not original: + raise HTTPException( + status_code=404, + detail=f"Speaker profile '{profile_id}' not found" + ) + + # Create duplicate with modified name + duplicate = SpeakerProfile( + name=f"{original.name} - Copy", + description=original.description, + tts_provider=original.tts_provider, + tts_model=original.tts_model, + speakers=original.speakers + ) + + await duplicate.save() + + return SpeakerProfileResponse( + id=str(duplicate.id), + name=duplicate.name, + description=duplicate.description or "", + tts_provider=duplicate.tts_provider, + 
tts_model=duplicate.tts_model, + speakers=duplicate.speakers + ) + + except HTTPException: + raise + except Exception as e: + logger.error(f"Failed to duplicate speaker profile: {e}") + raise HTTPException( + status_code=500, + detail=f"Failed to duplicate speaker profile: {str(e)}" + ) \ No newline at end of file diff --git a/api/routers/study_plans.py b/api/routers/study_plans.py new file mode 100644 index 0000000000000000000000000000000000000000..4cbc5267e1f264362210274bc125377574160941 --- /dev/null +++ b/api/routers/study_plans.py @@ -0,0 +1,242 @@ +""" +Study Plan API Router. +AI-generated personalized study schedules. +""" + +from datetime import datetime +from typing import List, Optional +from fastapi import APIRouter, HTTPException, Query +from loguru import logger + +from open_notebook.domain.study_plan import ( + StudyPlan, StudyPlanCreate, StudyPlanUpdate, + StudyPlanFull, StudyPlanWithTopics, StudyPlanStats, + StudyTopic, StudyTopicCreate, StudyTopicUpdate, + StudySession, StudySessionCreate, StudySessionUpdate, + PlanAdjustment, PlanAdjustmentResponse, + WeeklySchedule, DailySchedule, + PlanGenerationRequest, PlanGenerationResult +) +from open_notebook.services.study_plan_service import study_plan_service + +router = APIRouter(prefix="/study-plans", tags=["study-plans"]) + + +# ============ Study Plan Endpoints ============ + +@router.post("", response_model=StudyPlan) +async def create_plan(data: StudyPlanCreate): + """Create a new study plan.""" + return await study_plan_service.create_plan(data) + + +@router.post("/generate", response_model=PlanGenerationResult) +async def generate_plan(request: PlanGenerationRequest): + """Generate a complete study plan using AI.""" + return await study_plan_service.generate_plan(request) + + +@router.get("", response_model=List[StudyPlan]) +async def list_plans( + notebook_id: Optional[str] = Query(None, description="Filter by notebook ID"), + active_only: bool = Query(False, description="Only return active 
plans") +): + """List study plans.""" + if notebook_id: + plans = await study_plan_service.get_plans_for_notebook(notebook_id) + elif active_only: + plans = await study_plan_service.get_active_plans() + else: + plans = await study_plan_service.get_active_plans() # Default to active + return plans + + +@router.get("/today", response_model=List[StudySession]) +async def get_today_sessions(plan_id: Optional[str] = Query(None)): + """Get study sessions scheduled for today.""" + return await study_plan_service.get_today_sessions(plan_id) + + +@router.get("/{plan_id}", response_model=StudyPlanFull) +async def get_plan(plan_id: str): + """Get a study plan with all details.""" + plan = await study_plan_service.get_plan_full(plan_id) + if not plan: + raise HTTPException(status_code=404, detail="Study plan not found") + return plan + + +@router.patch("/{plan_id}", response_model=StudyPlan) +async def update_plan(plan_id: str, data: StudyPlanUpdate): + """Update a study plan.""" + plan = await study_plan_service.update_plan(plan_id, data) + if not plan: + raise HTTPException(status_code=404, detail="Study plan not found") + return plan + + +@router.delete("/{plan_id}") +async def delete_plan(plan_id: str): + """Delete a study plan and all related data.""" + success = await study_plan_service.delete_plan(plan_id) + if not success: + raise HTTPException(status_code=404, detail="Study plan not found") + return {"status": "deleted", "plan_id": plan_id} + + +@router.get("/{plan_id}/stats", response_model=StudyPlanStats) +async def get_plan_stats(plan_id: str): + """Get statistics for a study plan.""" + return await study_plan_service.get_plan_stats(plan_id) + + +@router.get("/{plan_id}/schedule", response_model=WeeklySchedule) +async def get_weekly_schedule( + plan_id: str, + week_start: Optional[datetime] = Query(None, description="Start of week (defaults to current week)") +): + """Get weekly schedule for a study plan.""" + return await 
study_plan_service.get_weekly_schedule(plan_id, week_start) + + +# ============ Topic Endpoints ============ + +@router.post("/{plan_id}/topics", response_model=StudyTopic) +async def create_topic(plan_id: str, data: StudyTopicCreate): + """Create a study topic.""" + if data.plan_id != plan_id: + data.plan_id = plan_id + return await study_plan_service.create_topic(data) + + +@router.get("/{plan_id}/topics", response_model=List[StudyTopic]) +async def list_topics(plan_id: str): + """List topics for a study plan.""" + return await study_plan_service.get_topics_for_plan(plan_id) + + +@router.get("/topics/{topic_id}", response_model=StudyTopic) +async def get_topic(topic_id: str): + """Get a study topic.""" + topic = await study_plan_service.get_topic(topic_id) + if not topic: + raise HTTPException(status_code=404, detail="Topic not found") + return topic + + +@router.patch("/topics/{topic_id}", response_model=StudyTopic) +async def update_topic(topic_id: str, data: StudyTopicUpdate): + """Update a study topic.""" + topic = await study_plan_service.update_topic(topic_id, data) + if not topic: + raise HTTPException(status_code=404, detail="Topic not found") + return topic + + +@router.delete("/topics/{topic_id}") +async def delete_topic(topic_id: str): + """Delete a study topic.""" + success = await study_plan_service.delete_topic(topic_id) + if not success: + raise HTTPException(status_code=404, detail="Topic not found") + return {"status": "deleted", "topic_id": topic_id} + + +# ============ Session Endpoints ============ + +@router.post("/{plan_id}/sessions", response_model=StudySession) +async def create_session(plan_id: str, data: StudySessionCreate): + """Create a study session.""" + if data.plan_id != plan_id: + data.plan_id = plan_id + return await study_plan_service.create_session(data) + + +@router.get("/{plan_id}/sessions", response_model=List[StudySession]) +async def list_sessions(plan_id: str): + """List sessions for a study plan.""" + return await 
study_plan_service.get_sessions_for_plan(plan_id) + + +@router.get("/sessions/{session_id}", response_model=StudySession) +async def get_session(session_id: str): + """Get a study session.""" + session = await study_plan_service.get_session(session_id) + if not session: + raise HTTPException(status_code=404, detail="Session not found") + return session + + +@router.patch("/sessions/{session_id}", response_model=StudySession) +async def update_session(session_id: str, data: StudySessionUpdate): + """Update a study session.""" + session = await study_plan_service.update_session(session_id, data) + if not session: + raise HTTPException(status_code=404, detail="Session not found") + return session + + +@router.post("/sessions/{session_id}/start", response_model=StudySession) +async def start_session(session_id: str): + """Start a study session.""" + session = await study_plan_service.start_session(session_id) + if not session: + raise HTTPException(status_code=404, detail="Session not found") + return session + + +@router.post("/sessions/{session_id}/complete", response_model=StudySession) +async def complete_session( + session_id: str, + rating: Optional[int] = Query(None, ge=1, le=5), + notes: Optional[str] = Query(None) +): + """Complete a study session.""" + try: + logger.info(f"API: complete_session called with session_id={session_id}, rating={rating}, notes={notes}") + session = await study_plan_service.complete_session(session_id, rating, notes) + if not session: + raise HTTPException(status_code=404, detail="Session not found") + logger.info(f"API: Session completed successfully: {session.id}") + return session + except HTTPException: + raise + except Exception as e: + logger.error(f"API: Error completing session: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + + +@router.delete("/sessions/{session_id}") +async def delete_session(session_id: str): + """Delete a study session.""" + success = await 
study_plan_service.delete_session(session_id) + if not success: + raise HTTPException(status_code=404, detail="Session not found") + return {"status": "deleted", "session_id": session_id} + + +@router.post("/sessions/{session_id}/skip", response_model=StudySession) +async def skip_session( + session_id: str, + reason: Optional[str] = Query(None, description="Reason for skipping") +): + """Skip a study session.""" + session = await study_plan_service.skip_session(session_id, reason) + if not session: + raise HTTPException(status_code=404, detail="Session not found") + return session + + +# ============ Adjustment Endpoints ============ + +@router.get("/{plan_id}/adjustments", response_model=List[PlanAdjustment]) +async def list_adjustments(plan_id: str): + """List adjustments for a study plan.""" + return await study_plan_service.get_adjustments_for_plan(plan_id) + + +@router.post("/adjustments/{adjustment_id}/respond") +async def respond_to_adjustment(adjustment_id: str, response: PlanAdjustmentResponse): + """Accept or reject a plan adjustment.""" + success = await study_plan_service.respond_to_adjustment(adjustment_id, response.accepted) + return {"status": "accepted" if response.accepted else "rejected", "adjustment_id": adjustment_id} diff --git a/api/routers/transformations.py b/api/routers/transformations.py new file mode 100644 index 0000000000000000000000000000000000000000..7242a302385f02feb6f63ce05e6eaa0307b1e92a --- /dev/null +++ b/api/routers/transformations.py @@ -0,0 +1,247 @@ +from typing import List + +from fastapi import APIRouter, HTTPException +from loguru import logger + +from api.models import ( + DefaultPromptResponse, + DefaultPromptUpdate, + TransformationCreate, + TransformationExecuteRequest, + TransformationExecuteResponse, + TransformationResponse, + TransformationUpdate, +) +from open_notebook.domain.models import Model +from open_notebook.domain.transformation import DefaultPrompts, Transformation +from open_notebook.exceptions import 
InvalidInputError +from open_notebook.graphs.transformation import graph as transformation_graph + +router = APIRouter() + + +@router.get("/transformations", response_model=List[TransformationResponse]) +async def get_transformations(): + """Get all transformations.""" + try: + transformations = await Transformation.get_all(order_by="name asc") + + return [ + TransformationResponse( + id=transformation.id or "", + name=transformation.name, + title=transformation.title, + description=transformation.description, + prompt=transformation.prompt, + apply_default=transformation.apply_default, + created=str(transformation.created), + updated=str(transformation.updated), + ) + for transformation in transformations + ] + except Exception as e: + logger.error(f"Error fetching transformations: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error fetching transformations: {str(e)}" + ) + + +@router.post("/transformations", response_model=TransformationResponse) +async def create_transformation(transformation_data: TransformationCreate): + """Create a new transformation.""" + try: + new_transformation = Transformation( + name=transformation_data.name, + title=transformation_data.title, + description=transformation_data.description, + prompt=transformation_data.prompt, + apply_default=transformation_data.apply_default, + ) + await new_transformation.save() + + return TransformationResponse( + id=new_transformation.id or "", + name=new_transformation.name, + title=new_transformation.title, + description=new_transformation.description, + prompt=new_transformation.prompt, + apply_default=new_transformation.apply_default, + created=str(new_transformation.created), + updated=str(new_transformation.updated), + ) + except InvalidInputError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error creating transformation: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error creating transformation: {str(e)}" 
+ ) + + +@router.post("/transformations/execute", response_model=TransformationExecuteResponse) +async def execute_transformation(execute_request: TransformationExecuteRequest): + """Execute a transformation on input text.""" + try: + # Validate transformation exists + transformation = await Transformation.get(execute_request.transformation_id) + if not transformation: + raise HTTPException(status_code=404, detail="Transformation not found") + + # Validate model exists + model = await Model.get(execute_request.model_id) + if not model: + raise HTTPException(status_code=404, detail="Model not found") + + # Execute the transformation + result = await transformation_graph.ainvoke( + dict( # type: ignore[arg-type] + input_text=execute_request.input_text, + transformation=transformation, + ), + config=dict(configurable={"model_id": execute_request.model_id}), + ) + + return TransformationExecuteResponse( + output=result["output"], + transformation_id=execute_request.transformation_id, + model_id=execute_request.model_id, + ) + + except HTTPException: + raise + except Exception as e: + logger.error(f"Error executing transformation: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error executing transformation: {str(e)}" + ) + + +@router.get("/transformations/default-prompt", response_model=DefaultPromptResponse) +async def get_default_prompt(): + """Get the default transformation prompt.""" + try: + default_prompts: DefaultPrompts = await DefaultPrompts.get_instance() # type: ignore[assignment] + + return DefaultPromptResponse( + transformation_instructions=default_prompts.transformation_instructions or "" + ) + except Exception as e: + logger.error(f"Error fetching default prompt: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error fetching default prompt: {str(e)}" + ) + + +@router.put("/transformations/default-prompt", response_model=DefaultPromptResponse) +async def update_default_prompt(prompt_update: DefaultPromptUpdate): + """Update 
the default transformation prompt.""" + try: + default_prompts: DefaultPrompts = await DefaultPrompts.get_instance() # type: ignore[assignment] + + default_prompts.transformation_instructions = prompt_update.transformation_instructions + await default_prompts.update() + + return DefaultPromptResponse( + transformation_instructions=default_prompts.transformation_instructions + ) + except Exception as e: + logger.error(f"Error updating default prompt: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error updating default prompt: {str(e)}" + ) + + +@router.get( + "/transformations/{transformation_id}", response_model=TransformationResponse +) +async def get_transformation(transformation_id: str): + """Get a specific transformation by ID.""" + try: + transformation = await Transformation.get(transformation_id) + if not transformation: + raise HTTPException(status_code=404, detail="Transformation not found") + + return TransformationResponse( + id=transformation.id or "", + name=transformation.name, + title=transformation.title, + description=transformation.description, + prompt=transformation.prompt, + apply_default=transformation.apply_default, + created=str(transformation.created), + updated=str(transformation.updated), + ) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error fetching transformation {transformation_id}: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error fetching transformation: {str(e)}" + ) + + +@router.put( + "/transformations/{transformation_id}", response_model=TransformationResponse +) +async def update_transformation( + transformation_id: str, transformation_update: TransformationUpdate +): + """Update a transformation.""" + try: + transformation = await Transformation.get(transformation_id) + if not transformation: + raise HTTPException(status_code=404, detail="Transformation not found") + + # Update only provided fields + if transformation_update.name is not None: + transformation.name 
= transformation_update.name + if transformation_update.title is not None: + transformation.title = transformation_update.title + if transformation_update.description is not None: + transformation.description = transformation_update.description + if transformation_update.prompt is not None: + transformation.prompt = transformation_update.prompt + if transformation_update.apply_default is not None: + transformation.apply_default = transformation_update.apply_default + + await transformation.save() + + return TransformationResponse( + id=transformation.id or "", + name=transformation.name, + title=transformation.title, + description=transformation.description, + prompt=transformation.prompt, + apply_default=transformation.apply_default, + created=str(transformation.created), + updated=str(transformation.updated), + ) + except HTTPException: + raise + except InvalidInputError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error updating transformation {transformation_id}: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error updating transformation: {str(e)}" + ) + + +@router.delete("/transformations/{transformation_id}") +async def delete_transformation(transformation_id: str): + """Delete a transformation.""" + try: + transformation = await Transformation.get(transformation_id) + if not transformation: + raise HTTPException(status_code=404, detail="Transformation not found") + + await transformation.delete() + + return {"message": "Transformation deleted successfully"} + except HTTPException: + raise + except Exception as e: + logger.error(f"Error deleting transformation {transformation_id}: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error deleting transformation: {str(e)}" + ) diff --git a/api/search_service.py b/api/search_service.py new file mode 100644 index 0000000000000000000000000000000000000000..07d7b6fa5c4f05ea7eabd941be26024367b1cf97 --- /dev/null +++ 
b/api/search_service.py @@ -0,0 +1,58 @@ +""" +Search service layer using API. +""" + +from typing import Any, Dict, List, Union + +from loguru import logger + +from api.client import api_client + + +class SearchService: + """Service layer for search operations using API.""" + + def __init__(self): + logger.info("Using API for search operations") + + def search( + self, + query: str, + search_type: str = "text", + limit: int = 100, + search_sources: bool = True, + search_notes: bool = True, + minimum_score: float = 0.2 + ) -> List[Dict[str, Any]]: + """Search the knowledge base.""" + response = api_client.search( + query=query, + search_type=search_type, + limit=limit, + search_sources=search_sources, + search_notes=search_notes, + minimum_score=minimum_score + ) + if isinstance(response, dict): + return response.get("results", []) + return [] + + def ask_knowledge_base( + self, + question: str, + strategy_model: str, + answer_model: str, + final_answer_model: str + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Ask the knowledge base a question.""" + response = api_client.ask_simple( + question=question, + strategy_model=strategy_model, + answer_model=answer_model, + final_answer_model=final_answer_model + ) + return response + + +# Global service instance +search_service = SearchService() \ No newline at end of file diff --git a/api/settings_service.py b/api/settings_service.py new file mode 100644 index 0000000000000000000000000000000000000000..ed84e02ec0c7667b13e5ce3aeb0e43bd74398692 --- /dev/null +++ b/api/settings_service.py @@ -0,0 +1,58 @@ +""" +Settings service layer using API. 
+""" + + +from loguru import logger + +from api.client import api_client +from open_notebook.domain.content_settings import ContentSettings + + +class SettingsService: + """Service layer for settings operations using API.""" + + def __init__(self): + logger.info("Using API for settings operations") + + def get_settings(self) -> ContentSettings: + """Get application settings.""" + settings_response = api_client.get_settings() + settings_data = settings_response if isinstance(settings_response, dict) else settings_response[0] + + # Create ContentSettings object from API response + settings = ContentSettings( + default_content_processing_engine_doc=settings_data.get("default_content_processing_engine_doc"), + default_content_processing_engine_url=settings_data.get("default_content_processing_engine_url"), + default_embedding_option=settings_data.get("default_embedding_option"), + auto_delete_files=settings_data.get("auto_delete_files"), + youtube_preferred_languages=settings_data.get("youtube_preferred_languages"), + ) + + return settings + + def update_settings(self, settings: ContentSettings) -> ContentSettings: + """Update application settings.""" + updates = { + "default_content_processing_engine_doc": settings.default_content_processing_engine_doc, + "default_content_processing_engine_url": settings.default_content_processing_engine_url, + "default_embedding_option": settings.default_embedding_option, + "auto_delete_files": settings.auto_delete_files, + "youtube_preferred_languages": settings.youtube_preferred_languages, + } + + settings_response = api_client.update_settings(**updates) + settings_data = settings_response if isinstance(settings_response, dict) else settings_response[0] + + # Update the settings object with the response + settings.default_content_processing_engine_doc = settings_data.get("default_content_processing_engine_doc") + settings.default_content_processing_engine_url = settings_data.get("default_content_processing_engine_url") + 
settings.default_embedding_option = settings_data.get("default_embedding_option") + settings.auto_delete_files = settings_data.get("auto_delete_files") + settings.youtube_preferred_languages = settings_data.get("youtube_preferred_languages") + + return settings + + +# Global service instance +settings_service = SettingsService() \ No newline at end of file diff --git a/api/sources_service.py b/api/sources_service.py new file mode 100644 index 0000000000000000000000000000000000000000..6e3fa3b2d0ba45b668e4aae2f7f2c65867039a7d --- /dev/null +++ b/api/sources_service.py @@ -0,0 +1,305 @@ +""" +Sources service layer using API. +""" + +from dataclasses import dataclass +from typing import Dict, List, Optional, Union + +from loguru import logger + +from api.client import api_client +from open_notebook.domain.notebook import Asset, Source + + +@dataclass +class SourceProcessingResult: + """Result of source creation with optional async processing info.""" + source: Source + is_async: bool = False + command_id: Optional[str] = None + status: Optional[str] = None + processing_info: Optional[Dict] = None + + +@dataclass +class SourceWithMetadata: + """Source object with additional metadata from API.""" + source: Source + embedded_chunks: int + + # Expose common source properties for easy access + @property + def id(self): + return self.source.id + + @property + def title(self): + return self.source.title + + @title.setter + def title(self, value): + self.source.title = value + + @property + def topics(self): + return self.source.topics + + @property + def asset(self): + return self.source.asset + + @property + def full_text(self): + return self.source.full_text + + @property + def created(self): + return self.source.created + + @property + def updated(self): + return self.source.updated + + +class SourcesService: + """Service layer for sources operations using API.""" + + def __init__(self): + logger.info("Using API for sources operations") + + def get_all_sources(self, 
notebook_id: Optional[str] = None) -> List[SourceWithMetadata]: + """Get all sources with optional notebook filtering.""" + sources_data = api_client.get_sources(notebook_id=notebook_id) + # Convert API response to SourceWithMetadata objects + sources = [] + for source_data in sources_data: + source = Source( + title=source_data["title"], + topics=source_data["topics"], + asset=Asset( + file_path=source_data["asset"]["file_path"] + if source_data["asset"] + else None, + url=source_data["asset"]["url"] if source_data["asset"] else None, + ) + if source_data["asset"] + else None, + ) + source.id = source_data["id"] + source.created = source_data["created"] + source.updated = source_data["updated"] + + # Wrap in SourceWithMetadata + source_with_metadata = SourceWithMetadata( + source=source, + embedded_chunks=source_data.get("embedded_chunks", 0) + ) + sources.append(source_with_metadata) + return sources + + def get_source(self, source_id: str) -> SourceWithMetadata: + """Get a specific source.""" + response = api_client.get_source(source_id) + source_data = response if isinstance(response, dict) else response[0] + source = Source( + title=source_data["title"], + topics=source_data["topics"], + full_text=source_data["full_text"], + asset=Asset( + file_path=source_data["asset"]["file_path"] + if source_data["asset"] + else None, + url=source_data["asset"]["url"] if source_data["asset"] else None, + ) + if source_data["asset"] + else None, + ) + source.id = source_data["id"] + source.created = source_data["created"] + source.updated = source_data["updated"] + + return SourceWithMetadata( + source=source, + embedded_chunks=source_data.get("embedded_chunks", 0) + ) + + def create_source( + self, + notebook_id: Optional[str] = None, + source_type: str = "text", + url: Optional[str] = None, + file_path: Optional[str] = None, + content: Optional[str] = None, + title: Optional[str] = None, + transformations: Optional[List[str]] = None, + embed: bool = False, + delete_source: 
bool = False, + notebooks: Optional[List[str]] = None, + async_processing: bool = False, + ) -> Union[Source, SourceProcessingResult]: + """ + Create a new source with support for async processing. + + Args: + notebook_id: Single notebook ID (deprecated, use notebooks parameter) + source_type: Type of source (link, upload, text) + url: URL for link sources + file_path: File path for upload sources + content: Text content for text sources + title: Optional source title + transformations: List of transformation IDs to apply + embed: Whether to embed content for vector search + delete_source: Whether to delete uploaded file after processing + notebooks: List of notebook IDs to add source to (preferred over notebook_id) + async_processing: Whether to process source asynchronously + + Returns: + Source object for sync processing (backward compatibility) + SourceProcessingResult for async processing (contains additional metadata) + """ + source_data = api_client.create_source( + notebook_id=notebook_id, + notebooks=notebooks, + source_type=source_type, + url=url, + file_path=file_path, + content=content, + title=title, + transformations=transformations, + embed=embed, + delete_source=delete_source, + async_processing=async_processing, + ) + + # Create Source object from response + response_data = source_data if isinstance(source_data, dict) else source_data[0] + source = Source( + title=response_data["title"], + topics=response_data.get("topics") or [], + full_text=response_data.get("full_text"), + asset=Asset( + file_path=response_data["asset"]["file_path"] + if response_data.get("asset") + else None, + url=response_data["asset"]["url"] + if response_data.get("asset") + else None, + ) + if response_data.get("asset") + else None, + ) + source.id = response_data["id"] + source.created = response_data["created"] + source.updated = response_data["updated"] + + # Check if this is an async processing response + if response_data.get("command_id") or response_data.get("status") 
or response_data.get("processing_info"): + # Ensure source_data is a dict for accessing attributes + source_data_dict = source_data if isinstance(source_data, dict) else source_data[0] + # Return enhanced result for async processing + return SourceProcessingResult( + source=source, + is_async=True, + command_id=source_data_dict.get("command_id"), + status=source_data_dict.get("status"), + processing_info=source_data_dict.get("processing_info"), + ) + else: + # Return simple Source for backward compatibility + return source + + def get_source_status(self, source_id: str) -> Dict: + """Get processing status for a source.""" + response = api_client.get_source_status(source_id) + return response if isinstance(response, dict) else response[0] + + def create_source_async( + self, + notebook_id: Optional[str] = None, + source_type: str = "text", + url: Optional[str] = None, + file_path: Optional[str] = None, + content: Optional[str] = None, + title: Optional[str] = None, + transformations: Optional[List[str]] = None, + embed: bool = False, + delete_source: bool = False, + notebooks: Optional[List[str]] = None, + ) -> SourceProcessingResult: + """ + Create a new source with async processing enabled. + + This is a convenience method that always uses async processing. + Returns a SourceProcessingResult with processing status information. 
+ """ + result = self.create_source( + notebook_id=notebook_id, + notebooks=notebooks, + source_type=source_type, + url=url, + file_path=file_path, + content=content, + title=title, + transformations=transformations, + embed=embed, + delete_source=delete_source, + async_processing=True, + ) + + # Since we forced async_processing=True, this should always be a SourceProcessingResult + if isinstance(result, SourceProcessingResult): + return result + else: + # Fallback: wrap Source in SourceProcessingResult + return SourceProcessingResult( + source=result, + is_async=False, # This shouldn't happen, but handle it gracefully + ) + + def is_source_processing_complete(self, source_id: str) -> bool: + """ + Check if a source's async processing is complete. + + Returns True if processing is complete (success or failure), + False if still processing or queued. + """ + try: + status_data = self.get_source_status(source_id) + status = status_data.get("status") + return status in ["completed", "failed", None] # None indicates legacy/sync source + except Exception as e: + logger.error(f"Error checking source processing status: {e}") + return True # Assume complete on error + + def update_source(self, source: Source) -> Source: + """Update a source.""" + if not source.id: + raise ValueError("Source ID is required for update") + + updates = { + "title": source.title, + "topics": source.topics, + } + source_data = api_client.update_source(source.id, **updates) + + # Ensure source_data is a dict + source_data_dict = source_data if isinstance(source_data, dict) else source_data[0] + + # Update the source object with the response + source.title = source_data_dict["title"] + source.topics = source_data_dict["topics"] + source.updated = source_data_dict["updated"] + + return source + + def delete_source(self, source_id: str) -> bool: + """Delete a source.""" + api_client.delete_source(source_id) + return True + + +# Global service instance +sources_service = SourcesService() + +# Export 
important classes for easy importing +__all__ = ["SourcesService", "SourceWithMetadata", "SourceProcessingResult", "sources_service"] diff --git a/api/transformations_service.py b/api/transformations_service.py new file mode 100644 index 0000000000000000000000000000000000000000..876b9a92764dec2d05e44ab8d05e8e60608b5242 --- /dev/null +++ b/api/transformations_service.py @@ -0,0 +1,130 @@ +""" +Transformations service layer using API. +""" + +from datetime import datetime +from typing import Any, Dict, List, Union + +from loguru import logger + +from api.client import api_client +from open_notebook.domain.transformation import Transformation + + +class TransformationsService: + """Service layer for transformations operations using API.""" + + def __init__(self): + logger.info("Using API for transformations operations") + + def get_all_transformations(self) -> List[Transformation]: + """Get all transformations.""" + transformations_data = api_client.get_transformations() + # Convert API response to Transformation objects + transformations = [] + for trans_data in transformations_data: + transformation = Transformation( + name=trans_data["name"], + title=trans_data["title"], + description=trans_data["description"], + prompt=trans_data["prompt"], + apply_default=trans_data["apply_default"], + ) + transformation.id = trans_data["id"] + transformation.created = datetime.fromisoformat(trans_data["created"].replace('Z', '+00:00')) + transformation.updated = datetime.fromisoformat(trans_data["updated"].replace('Z', '+00:00')) + transformations.append(transformation) + return transformations + + def get_transformation(self, transformation_id: str) -> Transformation: + """Get a specific transformation.""" + response = api_client.get_transformation(transformation_id) + trans_data = response if isinstance(response, dict) else response[0] + transformation = Transformation( + name=trans_data["name"], + title=trans_data["title"], + description=trans_data["description"], + 
prompt=trans_data["prompt"], + apply_default=trans_data["apply_default"], + ) + transformation.id = trans_data["id"] + transformation.created = datetime.fromisoformat(trans_data["created"].replace('Z', '+00:00')) + transformation.updated = datetime.fromisoformat(trans_data["updated"].replace('Z', '+00:00')) + return transformation + + def create_transformation( + self, + name: str, + title: str, + description: str, + prompt: str, + apply_default: bool = False + ) -> Transformation: + """Create a new transformation.""" + response = api_client.create_transformation( + name=name, + title=title, + description=description, + prompt=prompt, + apply_default=apply_default + ) + trans_data = response if isinstance(response, dict) else response[0] + transformation = Transformation( + name=trans_data["name"], + title=trans_data["title"], + description=trans_data["description"], + prompt=trans_data["prompt"], + apply_default=trans_data["apply_default"], + ) + transformation.id = trans_data["id"] + transformation.created = datetime.fromisoformat(trans_data["created"].replace('Z', '+00:00')) + transformation.updated = datetime.fromisoformat(trans_data["updated"].replace('Z', '+00:00')) + return transformation + + def update_transformation(self, transformation: Transformation) -> Transformation: + """Update a transformation.""" + if not transformation.id: + raise ValueError("Transformation ID is required for update") + + updates = { + "name": transformation.name, + "title": transformation.title, + "description": transformation.description, + "prompt": transformation.prompt, + "apply_default": transformation.apply_default, + } + response = api_client.update_transformation(transformation.id, **updates) + trans_data = response if isinstance(response, dict) else response[0] + + # Update the transformation object with the response + transformation.name = trans_data["name"] + transformation.title = trans_data["title"] + transformation.description = trans_data["description"] + 
transformation.prompt = trans_data["prompt"] + transformation.apply_default = trans_data["apply_default"] + transformation.updated = datetime.fromisoformat(trans_data["updated"].replace('Z', '+00:00')) + + return transformation + + def delete_transformation(self, transformation_id: str) -> bool: + """Delete a transformation.""" + api_client.delete_transformation(transformation_id) + return True + + def execute_transformation( + self, + transformation_id: str, + input_text: str, + model_id: str + ) -> Union[Dict[Any, Any], List[Dict[Any, Any]]]: + """Execute a transformation on input text.""" + result = api_client.execute_transformation( + transformation_id=transformation_id, + input_text=input_text, + model_id=model_id + ) + return result + + +# Global service instance +transformations_service = TransformationsService() \ No newline at end of file diff --git a/commands/__init__.py b/commands/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cd7fb896191144c5ce6447cbbd8e19f2f5f7b2a1 --- /dev/null +++ b/commands/__init__.py @@ -0,0 +1,15 @@ +"""Surreal-commands integration for Open Notebook""" + +from .embedding_commands import embed_single_item_command, rebuild_embeddings_command +from .example_commands import analyze_data_command, process_text_command +from .podcast_commands import generate_podcast_command +from .source_commands import process_source_command + +__all__ = [ + "embed_single_item_command", + "generate_podcast_command", + "process_source_command", + "process_text_command", + "analyze_data_command", + "rebuild_embeddings_command", +] diff --git a/commands/embedding_commands.py b/commands/embedding_commands.py new file mode 100644 index 0000000000000000000000000000000000000000..6e0445f8700beaab834ca752876a5b4072fe3309 --- /dev/null +++ b/commands/embedding_commands.py @@ -0,0 +1,633 @@ +import time +from typing import Dict, List, Literal, Optional + +from loguru import logger +from pydantic import BaseModel +from 
surreal_commands import CommandInput, CommandOutput, command, submit_command + +from open_notebook.database.repository import ensure_record_id, repo_query +from open_notebook.domain.models import model_manager +from open_notebook.domain.notebook import Note, Source, SourceInsight +from open_notebook.utils.text_utils import split_text + + +def full_model_dump(model): + if isinstance(model, BaseModel): + return model.model_dump() + elif isinstance(model, dict): + return {k: full_model_dump(v) for k, v in model.items()} + elif isinstance(model, list): + return [full_model_dump(item) for item in model] + else: + return model + + +class EmbedSingleItemInput(CommandInput): + item_id: str + item_type: Literal["source", "note", "insight"] + + +class EmbedSingleItemOutput(CommandOutput): + success: bool + item_id: str + item_type: str + chunks_created: int = 0 # For sources + processing_time: float + error_message: Optional[str] = None + + +class EmbedChunkInput(CommandInput): + source_id: str + chunk_index: int + chunk_text: str + + +class EmbedChunkOutput(CommandOutput): + success: bool + source_id: str + chunk_index: int + error_message: Optional[str] = None + + +class VectorizeSourceInput(CommandInput): + source_id: str + + +class VectorizeSourceOutput(CommandOutput): + success: bool + source_id: str + total_chunks: int + jobs_submitted: int + processing_time: float + error_message: Optional[str] = None + + +class RebuildEmbeddingsInput(CommandInput): + mode: Literal["existing", "all"] + include_sources: bool = True + include_notes: bool = True + include_insights: bool = True + + +class RebuildEmbeddingsOutput(CommandOutput): + success: bool + total_items: int + processed_items: int + failed_items: int + sources_processed: int = 0 + notes_processed: int = 0 + insights_processed: int = 0 + processing_time: float + error_message: Optional[str] = None + + +@command("embed_single_item", app="open_notebook") +async def embed_single_item_command( + input_data: 
EmbedSingleItemInput, +) -> EmbedSingleItemOutput: + """ + Embed a single item (source, note, or insight) + """ + start_time = time.time() + + try: + logger.info( + f"Starting embedding for {input_data.item_type}: {input_data.item_id}" + ) + + # Check if embedding model is available + EMBEDDING_MODEL = await model_manager.get_embedding_model() + if not EMBEDDING_MODEL: + raise ValueError( + "No embedding model configured. Please configure one in the Models section." + ) + + chunks_created = 0 + + if input_data.item_type == "source": + # Get source and vectorize + source = await Source.get(input_data.item_id) + if not source: + raise ValueError(f"Source '{input_data.item_id}' not found") + + await source.vectorize() + + # Count chunks created + chunks_result = await repo_query( + "SELECT VALUE count() FROM source_embedding WHERE source = $source_id GROUP ALL", + {"source_id": ensure_record_id(input_data.item_id)}, + ) + if chunks_result and isinstance(chunks_result[0], dict): + chunks_created = chunks_result[0].get("count", 0) + elif chunks_result and isinstance(chunks_result[0], int): + chunks_created = chunks_result[0] + else: + chunks_created = 0 + + logger.info(f"Source vectorized: {chunks_created} chunks created") + + elif input_data.item_type == "note": + # Get note and save (auto-embeds via ObjectModel.save()) + note = await Note.get(input_data.item_id) + if not note: + raise ValueError(f"Note '{input_data.item_id}' not found") + + await note.save() + logger.info(f"Note embedded: {input_data.item_id}") + + elif input_data.item_type == "insight": + # Get insight and re-generate embedding + insight = await SourceInsight.get(input_data.item_id) + if not insight: + raise ValueError(f"Insight '{input_data.item_id}' not found") + + # Generate new embedding + embedding = (await EMBEDDING_MODEL.aembed([insight.content]))[0] + + # Update insight with new embedding + await repo_query( + "UPDATE $insight_id SET embedding = $embedding", + { + "insight_id": 
ensure_record_id(input_data.item_id), + "embedding": embedding, + }, + ) + logger.info(f"Insight embedded: {input_data.item_id}") + + else: + raise ValueError( + f"Invalid item_type: {input_data.item_type}. Must be 'source', 'note', or 'insight'" + ) + + processing_time = time.time() - start_time + logger.info( + f"Successfully embedded {input_data.item_type} {input_data.item_id} in {processing_time:.2f}s" + ) + + return EmbedSingleItemOutput( + success=True, + item_id=input_data.item_id, + item_type=input_data.item_type, + chunks_created=chunks_created, + processing_time=processing_time, + ) + + except Exception as e: + processing_time = time.time() - start_time + logger.error(f"Embedding failed for {input_data.item_type} {input_data.item_id}: {e}") + logger.exception(e) + + return EmbedSingleItemOutput( + success=False, + item_id=input_data.item_id, + item_type=input_data.item_type, + processing_time=processing_time, + error_message=str(e), + ) + + +@command( + "embed_chunk", + app="open_notebook", + retry={ + "max_attempts": 5, + "wait_strategy": "exponential_jitter", + "wait_min": 1, + "wait_max": 30, + "retry_on": [RuntimeError, ConnectionError, TimeoutError], + }, +) +async def embed_chunk_command( + input_data: EmbedChunkInput, +) -> EmbedChunkOutput: + """ + Process a single text chunk for embedding as part of source vectorization. + + This command is designed to be submitted as a background job for each chunk + of a source document, allowing natural concurrency control through the worker pool. 
+ + Retry Strategy: + - Retries up to 5 times for transient failures: + * RuntimeError: SurrealDB transaction conflicts ("read or write conflict") + * ConnectionError: Network failures when calling embedding provider + * TimeoutError: Request timeouts to embedding provider + - Uses exponential-jitter backoff (1-30s) to prevent thundering herd during concurrent operations + - Does NOT retry permanent failures (ValueError, authentication errors, invalid input) + + Exception Handling: + - RuntimeError, ConnectionError, TimeoutError: Re-raised to trigger retry mechanism + - ValueError and other exceptions: Caught and returned as permanent failures (no retry) + """ + try: + logger.debug( + f"Processing chunk {input_data.chunk_index} for source {input_data.source_id}" + ) + + # Get embedding model + EMBEDDING_MODEL = await model_manager.get_embedding_model() + if not EMBEDDING_MODEL: + raise ValueError( + "No embedding model configured. Please configure one in the Models section." + ) + + # Generate embedding for the chunk + embedding = (await EMBEDDING_MODEL.aembed([input_data.chunk_text]))[0] + + # Insert chunk embedding into database + await repo_query( + """ + CREATE source_embedding CONTENT { + "source": $source_id, + "order": $order, + "content": $content, + "embedding": $embedding, + }; + """, + { + "source_id": ensure_record_id(input_data.source_id), + "order": input_data.chunk_index, + "content": input_data.chunk_text, + "embedding": embedding, + }, + ) + + logger.debug( + f"Successfully embedded chunk {input_data.chunk_index} for source {input_data.source_id}" + ) + + return EmbedChunkOutput( + success=True, + source_id=input_data.source_id, + chunk_index=input_data.chunk_index, + ) + + except RuntimeError: + # Re-raise RuntimeError to allow retry mechanism to handle DB transaction conflicts + logger.warning( + f"Transaction conflict for chunk {input_data.chunk_index} - will be retried by retry mechanism" + ) + raise + except (ConnectionError, TimeoutError) as 
e: + # Re-raise network/timeout errors to allow retry mechanism to handle transient provider failures + logger.warning( + f"Network/timeout error for chunk {input_data.chunk_index} ({type(e).__name__}: {e}) - will be retried by retry mechanism" + ) + raise + except Exception as e: + # Catch other exceptions (ValueError, etc.) as permanent failures + logger.error( + f"Failed to embed chunk {input_data.chunk_index} for source {input_data.source_id}: {e}" + ) + logger.exception(e) + + return EmbedChunkOutput( + success=False, + source_id=input_data.source_id, + chunk_index=input_data.chunk_index, + error_message=str(e), + ) + + +@command("vectorize_source", app="open_notebook", retry=None) +async def vectorize_source_command( + input_data: VectorizeSourceInput, +) -> VectorizeSourceOutput: + """ + Orchestrate source vectorization by splitting text into chunks and submitting + individual embed_chunk jobs to the worker queue. + + This command: + 1. Deletes existing embeddings (idempotency) + 2. Splits source text into chunks + 3. Submits each chunk as a separate embed_chunk job + 4. Returns immediately (jobs run in background) + + Natural concurrency control is provided by the worker pool size. + + Retry Strategy: + - Retries disabled (retry=None) - fails fast on job submission errors + - This ensures immediate visibility when orchestration fails + - Individual embed_chunk jobs have their own retry logic for DB conflicts + """ + start_time = time.time() + + try: + logger.info(f"Starting vectorization orchestration for source {input_data.source_id}") + + # 1. Load source + source = await Source.get(input_data.source_id) + if not source: + raise ValueError(f"Source '{input_data.source_id}' not found") + + if not source.full_text: + raise ValueError(f"Source {input_data.source_id} has no text to vectorize") + + # 2. 
Delete existing embeddings (idempotency) + logger.info(f"Deleting existing embeddings for source {input_data.source_id}") + delete_result = await repo_query( + "DELETE source_embedding WHERE source = $source_id", + {"source_id": ensure_record_id(input_data.source_id)} + ) + deleted_count = len(delete_result) if delete_result else 0 + if deleted_count > 0: + logger.info(f"Deleted {deleted_count} existing embeddings") + + # 3. Split text into chunks + logger.info(f"Splitting text into chunks for source {input_data.source_id}") + chunks = split_text(source.full_text) + total_chunks = len(chunks) + logger.info(f"Split into {total_chunks} chunks") + + if total_chunks == 0: + raise ValueError("No chunks created after splitting text") + + # 4. Submit each chunk as a separate job + logger.info(f"Submitting {total_chunks} chunk jobs to worker queue") + jobs_submitted = 0 + + for idx, chunk_text in enumerate(chunks): + try: + job_id = submit_command( + "open_notebook", # app name + "embed_chunk", # command name + { + "source_id": input_data.source_id, + "chunk_index": idx, + "chunk_text": chunk_text, + } + ) + jobs_submitted += 1 + + if (idx + 1) % 100 == 0: + logger.info(f" Submitted {idx + 1}/{total_chunks} chunk jobs") + + except Exception as e: + logger.error(f"Failed to submit chunk job {idx}: {e}") + # Continue submitting other chunks even if one fails + + processing_time = time.time() - start_time + + logger.info( + f"Vectorization orchestration complete for source {input_data.source_id}: " + f"{jobs_submitted}/{total_chunks} jobs submitted in {processing_time:.2f}s" + ) + + return VectorizeSourceOutput( + success=True, + source_id=input_data.source_id, + total_chunks=total_chunks, + jobs_submitted=jobs_submitted, + processing_time=processing_time, + ) + + except Exception as e: + processing_time = time.time() - start_time + logger.error(f"Vectorization orchestration failed for source {input_data.source_id}: {e}") + logger.exception(e) + + return 
VectorizeSourceOutput( + success=False, + source_id=input_data.source_id, + total_chunks=0, + jobs_submitted=0, + processing_time=processing_time, + error_message=str(e), + ) + + +async def collect_items_for_rebuild( + mode: str, + include_sources: bool, + include_notes: bool, + include_insights: bool, +) -> Dict[str, List[str]]: + """ + Collect items to rebuild based on mode and include flags. + + Returns: + Dict with keys: 'sources', 'notes', 'insights' containing lists of item IDs + """ + items: Dict[str, List[str]] = {"sources": [], "notes": [], "insights": []} + + if include_sources: + if mode == "existing": + # Query sources with embeddings (via source_embedding table) + result = await repo_query( + """ + RETURN array::distinct( + SELECT VALUE source.id + FROM source_embedding + WHERE embedding != none AND array::len(embedding) > 0 + ) + """ + ) + # RETURN returns the array directly as the result (not nested) + if result: + items["sources"] = [str(item) for item in result] + else: + items["sources"] = [] + else: # mode == "all" + # Query all sources with content + result = await repo_query("SELECT id FROM source WHERE full_text != none") + items["sources"] = [str(item["id"]) for item in result] if result else [] + + logger.info(f"Collected {len(items['sources'])} sources for rebuild") + + if include_notes: + if mode == "existing": + # Query notes with embeddings + result = await repo_query( + "SELECT id FROM note WHERE embedding != none AND array::len(embedding) > 0" + ) + else: # mode == "all" + # Query all notes (with content) + result = await repo_query("SELECT id FROM note WHERE content != none") + + items["notes"] = [str(item["id"]) for item in result] if result else [] + logger.info(f"Collected {len(items['notes'])} notes for rebuild") + + if include_insights: + if mode == "existing": + # Query insights with embeddings + result = await repo_query( + "SELECT id FROM source_insight WHERE embedding != none AND array::len(embedding) > 0" + ) + else: # mode 
== "all" + # Query all insights + result = await repo_query("SELECT id FROM source_insight") + + items["insights"] = [str(item["id"]) for item in result] if result else [] + logger.info(f"Collected {len(items['insights'])} insights for rebuild") + + return items + + +@command("rebuild_embeddings", app="open_notebook", retry=None) +async def rebuild_embeddings_command( + input_data: RebuildEmbeddingsInput, +) -> RebuildEmbeddingsOutput: + """ + Rebuild embeddings for sources, notes, and/or insights + + Retry Strategy: + - Retries disabled (retry=None) - batch failures are immediately reported + - This ensures immediate visibility when batch operations fail + - Allows operators to quickly identify and resolve issues + """ + start_time = time.time() + + try: + logger.info("=" * 60) + logger.info(f"Starting embedding rebuild with mode={input_data.mode}") + logger.info(f"Include: sources={input_data.include_sources}, notes={input_data.include_notes}, insights={input_data.include_insights}") + logger.info("=" * 60) + + # Check embedding model availability + EMBEDDING_MODEL = await model_manager.get_embedding_model() + if not EMBEDDING_MODEL: + raise ValueError( + "No embedding model configured. Please configure one in the Models section." 
+ ) + + logger.info(f"Using embedding model: {EMBEDDING_MODEL}") + + # Collect items to process + items = await collect_items_for_rebuild( + input_data.mode, + input_data.include_sources, + input_data.include_notes, + input_data.include_insights, + ) + + total_items = ( + len(items["sources"]) + len(items["notes"]) + len(items["insights"]) + ) + logger.info(f"Total items to process: {total_items}") + + if total_items == 0: + logger.warning("No items found to rebuild") + return RebuildEmbeddingsOutput( + success=True, + total_items=0, + processed_items=0, + failed_items=0, + processing_time=time.time() - start_time, + ) + + # Initialize counters + sources_processed = 0 + notes_processed = 0 + insights_processed = 0 + failed_items = 0 + + # Process sources + logger.info(f"\nProcessing {len(items['sources'])} sources...") + for idx, source_id in enumerate(items["sources"], 1): + try: + source = await Source.get(source_id) + if not source: + logger.warning(f"Source {source_id} not found, skipping") + failed_items += 1 + continue + + await source.vectorize() + sources_processed += 1 + + if idx % 10 == 0 or idx == len(items["sources"]): + logger.info( + f" Progress: {idx}/{len(items['sources'])} sources processed" + ) + + except Exception as e: + logger.error(f"Failed to re-embed source {source_id}: {e}") + failed_items += 1 + + # Process notes + logger.info(f"\nProcessing {len(items['notes'])} notes...") + for idx, note_id in enumerate(items["notes"], 1): + try: + note = await Note.get(note_id) + if not note: + logger.warning(f"Note {note_id} not found, skipping") + failed_items += 1 + continue + + await note.save() # Auto-embeds via ObjectModel.save() + notes_processed += 1 + + if idx % 10 == 0 or idx == len(items["notes"]): + logger.info(f" Progress: {idx}/{len(items['notes'])} notes processed") + + except Exception as e: + logger.error(f"Failed to re-embed note {note_id}: {e}") + failed_items += 1 + + # Process insights + logger.info(f"\nProcessing 
{len(items['insights'])} insights...") + for idx, insight_id in enumerate(items["insights"], 1): + try: + insight = await SourceInsight.get(insight_id) + if not insight: + logger.warning(f"Insight {insight_id} not found, skipping") + failed_items += 1 + continue + + # Re-generate embedding + embedding = (await EMBEDDING_MODEL.aembed([insight.content]))[0] + + # Update insight with new embedding + await repo_query( + "UPDATE $insight_id SET embedding = $embedding", + { + "insight_id": ensure_record_id(insight_id), + "embedding": embedding, + }, + ) + insights_processed += 1 + + if idx % 10 == 0 or idx == len(items["insights"]): + logger.info( + f" Progress: {idx}/{len(items['insights'])} insights processed" + ) + + except Exception as e: + logger.error(f"Failed to re-embed insight {insight_id}: {e}") + failed_items += 1 + + processing_time = time.time() - start_time + processed_items = sources_processed + notes_processed + insights_processed + + logger.info("=" * 60) + logger.info("REBUILD COMPLETE") + logger.info(f" Total processed: {processed_items}/{total_items}") + logger.info(f" Sources: {sources_processed}") + logger.info(f" Notes: {notes_processed}") + logger.info(f" Insights: {insights_processed}") + logger.info(f" Failed: {failed_items}") + logger.info(f" Time: {processing_time:.2f}s") + logger.info("=" * 60) + + return RebuildEmbeddingsOutput( + success=True, + total_items=total_items, + processed_items=processed_items, + failed_items=failed_items, + sources_processed=sources_processed, + notes_processed=notes_processed, + insights_processed=insights_processed, + processing_time=processing_time, + ) + + except Exception as e: + processing_time = time.time() - start_time + logger.error(f"Rebuild embeddings failed: {e}") + logger.exception(e) + + return RebuildEmbeddingsOutput( + success=False, + total_items=0, + processed_items=0, + failed_items=0, + processing_time=processing_time, + error_message=str(e), + ) diff --git a/commands/example_commands.py 
b/commands/example_commands.py new file mode 100644 index 0000000000000000000000000000000000000000..c1439e60d03e78ceaff379ce7a7d6faee43aa7db --- /dev/null +++ b/commands/example_commands.py @@ -0,0 +1,135 @@ +import asyncio +import time +from typing import List, Optional + +from loguru import logger +from pydantic import BaseModel +from surreal_commands import command + + +class TextProcessingInput(BaseModel): + text: str + operation: str = "uppercase" # uppercase, lowercase, word_count, reverse + delay_seconds: Optional[int] = None # For testing async behavior + +class TextProcessingOutput(BaseModel): + success: bool + original_text: str + processed_text: Optional[str] = None + word_count: Optional[int] = None + processing_time: float + error_message: Optional[str] = None + +class DataAnalysisInput(BaseModel): + numbers: List[float] + analysis_type: str = "basic" # basic, detailed + delay_seconds: Optional[int] = None + +class DataAnalysisOutput(BaseModel): + success: bool + analysis_type: str + count: int + sum: Optional[float] = None + average: Optional[float] = None + min_value: Optional[float] = None + max_value: Optional[float] = None + processing_time: float + error_message: Optional[str] = None + +@command("process_text", app="open_notebook") +async def process_text_command(input_data: TextProcessingInput) -> TextProcessingOutput: + """ + Example command for text processing. Tests basic command functionality + and demonstrates different processing types. 
+ """ + start_time = time.time() + + try: + logger.info(f"Processing text with operation: {input_data.operation}") + + # Simulate processing delay if specified + if input_data.delay_seconds: + await asyncio.sleep(input_data.delay_seconds) + + processed_text = None + word_count = None + + if input_data.operation == "uppercase": + processed_text = input_data.text.upper() + elif input_data.operation == "lowercase": + processed_text = input_data.text.lower() + elif input_data.operation == "reverse": + processed_text = input_data.text[::-1] + elif input_data.operation == "word_count": + word_count = len(input_data.text.split()) + processed_text = f"Word count: {word_count}" + else: + raise ValueError(f"Unknown operation: {input_data.operation}") + + processing_time = time.time() - start_time + + return TextProcessingOutput( + success=True, + original_text=input_data.text, + processed_text=processed_text, + word_count=word_count, + processing_time=processing_time + ) + + except Exception as e: + processing_time = time.time() - start_time + logger.error(f"Text processing failed: {e}") + return TextProcessingOutput( + success=False, + original_text=input_data.text, + processing_time=processing_time, + error_message=str(e) + ) + +@command("analyze_data", app="open_notebook") +async def analyze_data_command(input_data: DataAnalysisInput) -> DataAnalysisOutput: + """ + Example command for data analysis. Tests command with complex input/output + and demonstrates error handling. 
+ """ + start_time = time.time() + + try: + logger.info(f"Analyzing {len(input_data.numbers)} numbers with {input_data.analysis_type} analysis") + + # Simulate processing delay if specified + if input_data.delay_seconds: + await asyncio.sleep(input_data.delay_seconds) + + if not input_data.numbers: + raise ValueError("No numbers provided for analysis") + + count = len(input_data.numbers) + sum_value = sum(input_data.numbers) + average = sum_value / count + min_value = min(input_data.numbers) + max_value = max(input_data.numbers) + + processing_time = time.time() - start_time + + return DataAnalysisOutput( + success=True, + analysis_type=input_data.analysis_type, + count=count, + sum=sum_value, + average=average, + min_value=min_value, + max_value=max_value, + processing_time=processing_time + ) + + except Exception as e: + processing_time = time.time() - start_time + logger.error(f"Data analysis failed: {e}") + return DataAnalysisOutput( + success=False, + analysis_type=input_data.analysis_type, + count=0, + processing_time=processing_time, + error_message=str(e) + ) \ No newline at end of file diff --git a/commands/podcast_commands.py b/commands/podcast_commands.py new file mode 100644 index 0000000000000000000000000000000000000000..1382abb65f6413b1abd92bd27ba95b74e8af8ab4 --- /dev/null +++ b/commands/podcast_commands.py @@ -0,0 +1,187 @@ +import time +from pathlib import Path +from typing import Optional + +from loguru import logger +from pydantic import BaseModel +from surreal_commands import CommandInput, CommandOutput, command + +from open_notebook.config import DATA_FOLDER +from open_notebook.database.repository import ensure_record_id, repo_query +from open_notebook.domain.podcast import EpisodeProfile, PodcastEpisode, SpeakerProfile + +try: + from podcast_creator import configure, create_podcast +except ImportError as e: + logger.error(f"Failed to import podcast_creator: {e}") + raise ValueError("podcast_creator library not available") + + +def 
full_model_dump(model): + if isinstance(model, BaseModel): + return model.model_dump() + elif isinstance(model, dict): + return {k: full_model_dump(v) for k, v in model.items()} + elif isinstance(model, list): + return [full_model_dump(item) for item in model] + else: + return model + + +class PodcastGenerationInput(CommandInput): + episode_profile: str + speaker_profile: str + episode_name: str + content: str + briefing_suffix: Optional[str] = None + + +class PodcastGenerationOutput(CommandOutput): + success: bool + episode_id: Optional[str] = None + audio_file_path: Optional[str] = None + transcript: Optional[dict] = None + outline: Optional[dict] = None + processing_time: float + error_message: Optional[str] = None + + +@command("generate_podcast", app="open_notebook") +async def generate_podcast_command( + input_data: PodcastGenerationInput, +) -> PodcastGenerationOutput: + """ + Real podcast generation using podcast-creator library with Episode Profiles + """ + start_time = time.time() + + try: + logger.info( + f"Starting podcast generation for episode: {input_data.episode_name}" + ) + logger.info(f"Using episode profile: {input_data.episode_profile}") + + # 1. Load Episode and Speaker profiles from SurrealDB + episode_profile = await EpisodeProfile.get_by_name(input_data.episode_profile) + if not episode_profile: + raise ValueError( + f"Episode profile '{input_data.episode_profile}' not found" + ) + + speaker_profile = await SpeakerProfile.get_by_name( + episode_profile.speaker_config + ) + if not speaker_profile: + raise ValueError( + f"Speaker profile '{episode_profile.speaker_config}' not found" + ) + + logger.info(f"Loaded episode profile: {episode_profile.name}") + logger.info(f"Loaded speaker profile: {speaker_profile.name}") + + # 3. 
Load all profiles and configure podcast-creator + episode_profiles = await repo_query("SELECT * FROM episode_profile") + speaker_profiles = await repo_query("SELECT * FROM speaker_profile") + + # Transform the surrealdb array into a dictionary for podcast-creator + episode_profiles_dict = { + profile["name"]: profile for profile in episode_profiles + } + speaker_profiles_dict = { + profile["name"]: profile for profile in speaker_profiles + } + + # 4. Generate briefing + briefing = episode_profile.default_briefing + if input_data.briefing_suffix: + briefing += f"\n\nAdditional instructions: {input_data.briefing_suffix}" + + # Create the a record for the episose and associate with the ongoing command + episode = PodcastEpisode( + name=input_data.episode_name, + episode_profile=full_model_dump(episode_profile.model_dump()), + speaker_profile=full_model_dump(speaker_profile.model_dump()), + command=ensure_record_id(input_data.execution_context.command_id) + if input_data.execution_context + else None, + briefing=briefing, + content=input_data.content, + audio_file=None, + transcript=None, + outline=None, + ) + await episode.save() + + configure("speakers_config", {"profiles": speaker_profiles_dict}) + configure("episode_config", {"profiles": episode_profiles_dict}) + + logger.info("Configured podcast-creator with episode and speaker profiles") + + logger.info(f"Generated briefing (length: {len(briefing)} chars)") + + # 5. Create output directory + output_dir = Path(f"{DATA_FOLDER}/podcasts/episodes/{input_data.episode_name}") + output_dir.mkdir(parents=True, exist_ok=True) + + logger.info(f"Created output directory: {output_dir}") + + # 6. 
Generate podcast using podcast-creator + logger.info("Starting podcast generation with podcast-creator...") + + result = await create_podcast( + content=input_data.content, + briefing=briefing, + episode_name=input_data.episode_name, + output_dir=str(output_dir), + speaker_config=speaker_profile.name, + episode_profile=episode_profile.name, + ) + + episode.audio_file = ( + str(result.get("final_output_file_path")) if result else None + ) + episode.transcript = { + "transcript": full_model_dump(result["transcript"]) if result else None + } + episode.outline = full_model_dump(result["outline"]) if result else None + await episode.save() + + processing_time = time.time() - start_time + logger.info( + f"Successfully generated podcast episode: {episode.id} in {processing_time:.2f}s" + ) + + return PodcastGenerationOutput( + success=True, + episode_id=str(episode.id), + audio_file_path=str(result.get("final_output_file_path")) + if result + else None, + transcript={"transcript": full_model_dump(result["transcript"])} + if result.get("transcript") + else None, + outline=full_model_dump(result["outline"]) + if result.get("outline") + else None, + processing_time=processing_time, + ) + + except Exception as e: + processing_time = time.time() - start_time + logger.error(f"Podcast generation failed: {e}") + logger.exception(e) + + # Check for specific GPT-5 extended thinking issue + error_msg = str(e) + if "Invalid json output" in error_msg or "Expecting value" in error_msg: + # This often happens with GPT-5 models that use extended thinking ( tags) + # and put all output inside thinking blocks + error_msg += ( + "\n\nNOTE: This error commonly occurs with GPT-5 models that use extended thinking. " + "The model may be putting all output inside tags, leaving nothing to parse. " + "Try using gpt-4o, gpt-4o-mini, or gpt-4-turbo instead in your episode profile." 
+ ) + + return PodcastGenerationOutput( + success=False, processing_time=processing_time, error_message=error_msg + ) diff --git a/commands/source_commands.py b/commands/source_commands.py new file mode 100644 index 0000000000000000000000000000000000000000..538592194d446dd3e51351c2cf370bac6c1a2302 --- /dev/null +++ b/commands/source_commands.py @@ -0,0 +1,152 @@ +import time +from typing import Any, Dict, List, Optional + +from loguru import logger +from pydantic import BaseModel +from surreal_commands import CommandInput, CommandOutput, command + +from open_notebook.database.repository import ensure_record_id +from open_notebook.domain.notebook import Source +from open_notebook.domain.transformation import Transformation + +try: + from open_notebook.graphs.source import source_graph +except ImportError as e: + logger.error(f"Failed to import source_graph: {e}") + raise ValueError("source_graph not available") + + +def full_model_dump(model): + if isinstance(model, BaseModel): + return model.model_dump() + elif isinstance(model, dict): + return {k: full_model_dump(v) for k, v in model.items()} + elif isinstance(model, list): + return [full_model_dump(item) for item in model] + else: + return model + + +class SourceProcessingInput(CommandInput): + source_id: str + content_state: Dict[str, Any] + notebook_ids: List[str] + transformations: List[str] + embed: bool + + +class SourceProcessingOutput(CommandOutput): + success: bool + source_id: str + embedded_chunks: int = 0 + insights_created: int = 0 + processing_time: float + error_message: Optional[str] = None + + +@command( + "process_source", + app="open_notebook", + retry={ + "max_attempts": 5, + "wait_strategy": "exponential_jitter", + "wait_min": 1, + "wait_max": 30, + "retry_on": [RuntimeError], + }, +) +async def process_source_command( + input_data: SourceProcessingInput, +) -> SourceProcessingOutput: + """ + Process source content using the source_graph workflow + """ + start_time = time.time() + + try: + 
logger.info(f"Starting source processing for source: {input_data.source_id}") + logger.info(f"Notebook IDs: {input_data.notebook_ids}") + logger.info(f"Transformations: {input_data.transformations}") + logger.info(f"Embed: {input_data.embed}") + + # 1. Load transformation objects from IDs + transformations = [] + for trans_id in input_data.transformations: + logger.info(f"Loading transformation: {trans_id}") + transformation = await Transformation.get(trans_id) + if not transformation: + raise ValueError(f"Transformation '{trans_id}' not found") + transformations.append(transformation) + + logger.info(f"Loaded {len(transformations)} transformations") + + # 2. Get existing source record to update its command field + source = await Source.get(input_data.source_id) + if not source: + raise ValueError(f"Source '{input_data.source_id}' not found") + + # Update source with command reference + source.command = ( + ensure_record_id(input_data.execution_context.command_id) + if input_data.execution_context + else None + ) + await source.save() + + logger.info(f"Updated source {source.id} with command reference") + + # 3. Process source with all notebooks + logger.info(f"Processing source with {len(input_data.notebook_ids)} notebooks") + + # Execute source_graph with all notebooks + result = await source_graph.ainvoke( + { # type: ignore[arg-type] + "content_state": input_data.content_state, + "notebook_ids": input_data.notebook_ids, # Use notebook_ids (plural) as expected by SourceState + "apply_transformations": transformations, + "embed": input_data.embed, + "source_id": input_data.source_id, # Add the source_id to the state + } + ) + + processed_source = result["source"] + + # 4. 
Gather processing results (notebook associations handled by source_graph) + embedded_chunks = ( + await processed_source.get_embedded_chunks() if input_data.embed else 0 + ) + insights_list = await processed_source.get_insights() + insights_created = len(insights_list) + + processing_time = time.time() - start_time + logger.info( + f"Successfully processed source: {processed_source.id} in {processing_time:.2f}s" + ) + logger.info( + f"Created {insights_created} insights and {embedded_chunks} embedded chunks" + ) + + return SourceProcessingOutput( + success=True, + source_id=str(processed_source.id), + embedded_chunks=embedded_chunks, + insights_created=insights_created, + processing_time=processing_time, + ) + + except RuntimeError as e: + # Transaction conflicts should be retried by surreal-commands + logger.warning(f"Transaction conflict, will retry: {e}") + raise + + except Exception as e: + # Other errors are permanent failures + processing_time = time.time() - start_time + logger.error(f"Source processing failed: {e}") + + return SourceProcessingOutput( + success=False, + source_id=input_data.source_id, + processing_time=processing_time, + error_message=str(e), + ) diff --git a/diagnose.ps1 b/diagnose.ps1 new file mode 100644 index 0000000000000000000000000000000000000000..6ac6025bd1daac6386056ff1e0481950030df27c --- /dev/null +++ b/diagnose.ps1 @@ -0,0 +1,116 @@ +Write-Host "=====================================" -ForegroundColor Cyan +Write-Host " Open Notebook Diagnostics" -ForegroundColor Cyan +Write-Host "=====================================" -ForegroundColor Cyan + +# Check if services are running +Write-Host "`n1. Checking if services are running..." 
-ForegroundColor Yellow + +# Check Frontend (port 3000) +try { + $frontendResponse = Invoke-WebRequest -Uri "http://localhost:3000" -TimeoutSec 5 + if ($frontendResponse.StatusCode -eq 200) { + Write-Host " [OK] Frontend is running on port 3000" -ForegroundColor Green + } +} catch { + Write-Host " [ERROR] Frontend is NOT running on port 3000" -ForegroundColor Red +} + +# Check Backend (port 5055) +try { + $backendResponse = Invoke-WebRequest -Uri "http://localhost:5055/health" -TimeoutSec 5 + if ($backendResponse.StatusCode -eq 200) { + Write-Host " [OK] Backend is running on port 5055" -ForegroundColor Green + } +} catch { + Write-Host " [ERROR] Backend is NOT running on port 5055" -ForegroundColor Red +} + +# Test API Endpoints +Write-Host "`n2. Testing API endpoints..." -ForegroundColor Yellow +$apiUrl = "http://localhost:3000" + +try { + $sources = Invoke-RestMethod -Uri "$apiUrl/api/sources" -Method Get + $sourceCount = $sources.Count + Write-Host " [OK] Sources API: OK ($sourceCount sources found)" -ForegroundColor Green +} catch { + Write-Host " [ERROR] Sources API: FAILED - $($_.Exception.Message)" -ForegroundColor Red +} + +try { + $transformations = Invoke-RestMethod -Uri "$apiUrl/api/transformations" -Method Get + $transCount = $transformations.Count + Write-Host " [OK] Transformations API: OK ($transCount transformations found)" -ForegroundColor Green +} catch { + Write-Host " [ERROR] Transformations API: FAILED - $($_.Exception.Message)" -ForegroundColor Red +} + +# Check component file +Write-Host "`n3. Checking component file..." 
-ForegroundColor Yellow +$componentFile = "frontend\src\components\source\SourceDetailContent.tsx" + +if (Test-Path $componentFile) { + Write-Host " [OK] Component file exists" -ForegroundColor Green + + $content = Get-Content $componentFile -Raw + + # Check for key features + $hasTabs = $content -match 'TabsList' + $hasInsights = $content -match 'value="insights"' + $hasDetails = $content -match 'value="details"' + $hasFetchInsights = $content -match 'fetchInsights' + $hasCreateInsight = $content -match 'createInsight' + + if ($hasTabs) { + Write-Host " [OK] Tabs component found" -ForegroundColor Green + } else { + Write-Host " [ERROR] Tabs component NOT found" -ForegroundColor Red + } + + if ($hasInsights) { + Write-Host " [OK] Insights tab found" -ForegroundColor Green + } else { + Write-Host " [ERROR] Insights tab NOT found" -ForegroundColor Red + } + + if ($hasDetails) { + Write-Host " [OK] Details tab found" -ForegroundColor Green + } else { + Write-Host " [ERROR] Details tab NOT found" -ForegroundColor Red + } + + if ($hasFetchInsights) { + Write-Host " [OK] fetchInsights function found" -ForegroundColor Green + } else { + Write-Host " [ERROR] fetchInsights function NOT found" -ForegroundColor Red + } + + if ($hasCreateInsight) { + Write-Host " [OK] createInsight function found" -ForegroundColor Green + } else { + Write-Host " [ERROR] createInsight function NOT found" -ForegroundColor Red + } +} else { + Write-Host " [ERROR] Component file NOT found" -ForegroundColor Red +} + +Write-Host "`n=====================================" -ForegroundColor Cyan +Write-Host " Manual Verification Steps" -ForegroundColor Cyan +Write-Host "=====================================" -ForegroundColor Cyan + +Write-Host "`nPlease do the following:" -ForegroundColor Yellow +Write-Host "1. Open http://localhost:3000 in your browser" +Write-Host "2. Navigate to a source (click on any uploaded PDF)" +Write-Host "3. Look for THREE tabs: Content, Insights, Details" +Write-Host "4. 
Click on the 'Insights' tab" +Write-Host "" +Write-Host "What do you see?" -ForegroundColor Cyan +Write-Host " - Are the three tabs visible?" +Write-Host " - Can you click on the 'Insights' tab?" +Write-Host " - What happens when you click it?" +Write-Host "" + +Write-Host "`nIf you do not see the tabs:" -ForegroundColor Yellow +Write-Host "1. Open browser DevTools (F12)" +Write-Host "2. Check the Console for any errors" +Write-Host "3. Take a screenshot and share it" diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml new file mode 100644 index 0000000000000000000000000000000000000000..1417ea8c56a8b86b6ac1799dc5af2e6a63d0531e --- /dev/null +++ b/docker-compose.dev.yml @@ -0,0 +1,28 @@ +services: + surrealdb: + image: surrealdb/surrealdb:v2 + volumes: + - ./surreal_data:/mydata + environment: + - SURREAL_EXPERIMENTAL_GRAPHQL=true + ports: + - "8000:8000" + command: start --log info --user root --pass root rocksdb:/mydata/mydatabase.db + pull_policy: always + user: root + restart: always + open_notebook: + build: + context: . 
+ dockerfile: Dockerfile + ports: + - "8502:8502" + - "5055:5055" + env_file: + - ./docker.env + depends_on: + - surrealdb + volumes: + - ./notebook_data:/app/data + restart: always + diff --git a/docker-compose.full.yml b/docker-compose.full.yml new file mode 100644 index 0000000000000000000000000000000000000000..9675f05f76b941b3504903fdf2ec47e1c1312071 --- /dev/null +++ b/docker-compose.full.yml @@ -0,0 +1,25 @@ +services: + surrealdb: + image: surrealdb/surrealdb:v2 + volumes: + - ./surreal_data:/mydata + environment: + - SURREAL_EXPERIMENTAL_GRAPHQL=true + ports: + - "8000:8000" + command: start --log info --user root --pass root rocksdb:/mydata/mydatabase.db + pull_policy: always + user: root + restart: always + open_notebook: + image: lfnovo/open_notebook:v1-latest + ports: + - "8502:8502" + - "5055:5055" + env_file: + - ./docker.env + depends_on: + - surrealdb + volumes: + - ./notebook_data:/app/data + restart: always diff --git a/docker-compose.single.yml b/docker-compose.single.yml new file mode 100644 index 0000000000000000000000000000000000000000..ad641bd70e12c0fe51a109c28c9c965810f95f07 --- /dev/null +++ b/docker-compose.single.yml @@ -0,0 +1,20 @@ +services: + open_notebook_single: + # image: lfnovo/open_notebook:v1-latest-single + build: + context: . 
+ dockerfile: Dockerfile.single + ports: + - "8502:8502" # Next.js Frontend + - "5055:5055" # REST API + env_file: + - ./docker.env + volumes: + - ./notebook_data:/app/data # Application data + - ./surreal_single_data:/mydata # SurrealDB data + restart: always + # Single container includes all services: SurrealDB, API, Worker, and Next.js Frontend + # Access: + # - Next.js UI: http://localhost:8502 + # - REST API: http://localhost:5055 + # - API Documentation: http://localhost:5055/docs \ No newline at end of file diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..55f665441d22d3d5e41ca7e464ca300faac2a864 --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,43 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.* +.yarn/* +!.yarn/patches +!.yarn/plugins +!.yarn/releases +!.yarn/versions + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# env files (can opt-in for committing if needed) +.env* + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts + +doc_exports/ \ No newline at end of file diff --git a/frontend/components.json b/frontend/components.json new file mode 100644 index 0000000000000000000000000000000000000000..ffe928f5b6dfe484f57a5fd47d0487f21e164fa3 --- /dev/null +++ b/frontend/components.json @@ -0,0 +1,21 @@ +{ + "$schema": "https://ui.shadcn.com/schema.json", + "style": "new-york", + "rsc": true, + "tsx": true, + "tailwind": { + "config": "", + "css": "src/app/globals.css", + "baseColor": "neutral", + "cssVariables": true, + "prefix": "" + }, + "aliases": { + "components": "@/components", + "utils": "@/lib/utils", + "ui": "@/components/ui", + "lib": "@/lib", + "hooks": "@/hooks" + }, + "iconLibrary": "lucide" +} \ No newline at end of file diff --git 
a/frontend/eslint.config.mjs b/frontend/eslint.config.mjs new file mode 100644 index 0000000000000000000000000000000000000000..c85fb67c463f20d1ee449b0ffee725a61dfb9259 --- /dev/null +++ b/frontend/eslint.config.mjs @@ -0,0 +1,16 @@ +import { dirname } from "path"; +import { fileURLToPath } from "url"; +import { FlatCompat } from "@eslint/eslintrc"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +const compat = new FlatCompat({ + baseDirectory: __dirname, +}); + +const eslintConfig = [ + ...compat.extends("next/core-web-vitals", "next/typescript"), +]; + +export default eslintConfig; diff --git a/frontend/next.config.ts b/frontend/next.config.ts new file mode 100644 index 0000000000000000000000000000000000000000..df572ee8ccae97d236a7167148f1833adc265b98 --- /dev/null +++ b/frontend/next.config.ts @@ -0,0 +1,35 @@ +import type { NextConfig } from "next"; + +const nextConfig: NextConfig = { + // Disable standalone for Railway deployment + // Railway works better with standard Next.js build + // output: "standalone", + + // Ignore lint/type errors during build for faster production build + eslint: { + ignoreDuringBuilds: true, + }, + typescript: { + ignoreBuildErrors: true, + }, + // API Rewrites: Proxy /api/* requests to FastAPI backend + // This simplifies reverse proxy configuration - users only need to proxy to port 8502 + // Next.js handles internal routing to the API backend on port 5055 + async rewrites() { + // INTERNAL_API_URL: Where Next.js server-side should proxy API requests + // Default: http://localhost:5055 (single-container deployment) + // Override for multi-container: INTERNAL_API_URL=http://api-service:5055 + const internalApiUrl = process.env.INTERNAL_API_URL || 'http://127.0.0.1:5055' + + console.log(`[Next.js Rewrites] Proxying /api/* to ${internalApiUrl}/api/*`) + + return [ + { + source: '/api/:path*', + destination: `${internalApiUrl}/api/:path*`, + }, + ] + }, +}; + +export default nextConfig; 
diff --git a/frontend/package-lock.json b/frontend/package-lock.json new file mode 100644 index 0000000000000000000000000000000000000000..c010bbbb09bce1176631bb1fc0ffc3b8f19f56c3 --- /dev/null +++ b/frontend/package-lock.json @@ -0,0 +1,11091 @@ +{ + "name": "frontend", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "frontend", + "version": "0.1.0", + "dependencies": { + "@hookform/resolvers": "^5.1.1", + "@monaco-editor/react": "^4.7.0", + "@radix-ui/react-accordion": "^1.2.12", + "@radix-ui/react-alert-dialog": "^1.1.14", + "@radix-ui/react-checkbox": "^1.3.2", + "@radix-ui/react-collapsible": "^1.1.11", + "@radix-ui/react-dialog": "^1.1.15", + "@radix-ui/react-dropdown-menu": "^2.1.15", + "@radix-ui/react-label": "^2.1.7", + "@radix-ui/react-popover": "^1.1.15", + "@radix-ui/react-progress": "^1.1.7", + "@radix-ui/react-radio-group": "^1.3.8", + "@radix-ui/react-scroll-area": "^1.2.9", + "@radix-ui/react-select": "^2.2.5", + "@radix-ui/react-separator": "^1.1.7", + "@radix-ui/react-slot": "^1.2.3", + "@radix-ui/react-tabs": "^1.1.12", + "@radix-ui/react-tooltip": "^1.2.7", + "@tailwindcss/typography": "^0.5.16", + "@tanstack/react-query": "^5.83.0", + "@uiw/react-md-editor": "^4.0.8", + "axios": "^1.12.0", + "class-variance-authority": "^0.7.1", + "clsx": "^2.1.1", + "cmdk": "^1.1.1", + "d3-force": "^3.0.0", + "date-fns": "^4.1.0", + "lucide-react": "^0.525.0", + "mermaid": "^11.12.2", + "next": "15.4.10", + "next-themes": "^0.4.6", + "react": "19.1.0", + "react-dom": "19.1.0", + "react-force-graph-2d": "^1.29.0", + "react-hook-form": "^7.60.0", + "react-markdown": "^10.1.0", + "remark-gfm": "^4.0.1", + "sonner": "^2.0.6", + "tailwind-merge": "^3.3.1", + "use-debounce": "^10.0.6", + "zod": "^4.0.5", + "zustand": "^5.0.6" + }, + "devDependencies": { + "@eslint/eslintrc": "^3", + "@tailwindcss/postcss": "^4", + "@types/node": "^20", + "@types/react": "^19", + "@types/react-dom": "^19", + "eslint": "^9", + 
"eslint-config-next": "15.4.2", + "tailwindcss": "^4", + "tw-animate-css": "^1.3.5", + "typescript": "^5" + } + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@antfu/install-pkg": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@antfu/install-pkg/-/install-pkg-1.1.0.tgz", + "integrity": "sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==", + "license": "MIT", + "dependencies": { + "package-manager-detector": "^1.3.0", + "tinyexec": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/@babel/runtime": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz", + "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@braintree/sanitize-url": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.1.1.tgz", + "integrity": "sha512-i1L7noDNxtFyL5DmZafWy1wRVhGehQmzZaz1HiN5e7iylJMSZR7ekOV7NsIqa5qBldlLrsKv4HbgFUVlQrz8Mw==", + "license": "MIT" + }, + 
"node_modules/@chevrotain/cst-dts-gen": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.0.3.tgz", + "integrity": "sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==", + "license": "Apache-2.0", + "dependencies": { + "@chevrotain/gast": "11.0.3", + "@chevrotain/types": "11.0.3", + "lodash-es": "4.17.21" + } + }, + "node_modules/@chevrotain/cst-dts-gen/node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "license": "MIT" + }, + "node_modules/@chevrotain/gast": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.0.3.tgz", + "integrity": "sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q==", + "license": "Apache-2.0", + "dependencies": { + "@chevrotain/types": "11.0.3", + "lodash-es": "4.17.21" + } + }, + "node_modules/@chevrotain/gast/node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "license": "MIT" + }, + "node_modules/@chevrotain/regexp-to-ast": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.0.3.tgz", + "integrity": "sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA==", + "license": "Apache-2.0" + }, + "node_modules/@chevrotain/types": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/@chevrotain/types/-/types-11.0.3.tgz", + "integrity": "sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ==", + "license": "Apache-2.0" + }, + 
"node_modules/@chevrotain/utils": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/@chevrotain/utils/-/utils-11.0.3.tgz", + "integrity": "sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==", + "license": "Apache-2.0" + }, + "node_modules/@emnapi/core": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.4.5.tgz", + "integrity": "sha512-XsLw1dEOpkSX/WucdqUhPWP7hDxSvZiY+fsUC14h+FtQ2Ifni4znbBt8punRX+Uj2JG/uDb8nEHVKvrVlvdZ5Q==", + "dev": true, + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.0.4", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.4.5.tgz", + "integrity": "sha512-++LApOtY0pEEz1zrd9vy1/zXVaVJJ/EbAF3u0fXIzPJEDtnITsBGbbK0EkM72amhl/R5b+5xx0Y/QhcVOpuulg==", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.0.4.tgz", + "integrity": "sha512-PJR+bOmMOPH8AtcTGAyYNiuJ3/Fcoj2XN/gBEWzDIKh254XO+mM9XoXHk5GNEhodxeMznbg7BlRojVbKN+gC6g==", + "dev": true, + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz", + "integrity": "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + 
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/config-array": { + "version": "0.21.0", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz", + "integrity": "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==", + "dev": true, + "dependencies": { + "@eslint/object-schema": "^2.1.6", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/config-helpers": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.3.0.tgz", + "integrity": "sha512-ViuymvFmcJi04qdZeDc2whTHryouGcDlaxPqarTD0ZE10ISpxGUVZGZDx4w01upyIynL3iu6IXH2bS1NhclQMw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/core": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.1.tgz", + "integrity": "sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==", + "dev": true, + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "3.3.1", + "resolved": 
"https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", + "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/js": { + "version": "9.31.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.31.0.tgz", + "integrity": "sha512-LOm5OVt7D4qiKCqoiPbA7LWmI+tbw1VbTUowBcUMgQSuM6poJufkFkYDcQpo5KfgD39TnNySV26QjOh7VFpSyw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + } + }, + "node_modules/@eslint/object-schema": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", + "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.3.tgz", + "integrity": "sha512-1+WqvgNMhmlAambTvT3KPtCl/Ibr68VldY2XY40SL1CE0ZXiakFR/cbTspaF5HsnpDMvcYYoJHfl4980NBjGag==", + "dev": true, + "dependencies": { + "@eslint/core": "^0.15.1", + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@floating-ui/core": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.2.tgz", + "integrity": "sha512-wNB5ooIKHQc+Kui96jE/n69rHFWAVoxn5CAzL1Xdd8FG03cgY3MLO+GF9U3W737fYDSgPWA6MReKhBQBop6Pcw==", + "dependencies": { + 
"@floating-ui/utils": "^0.2.10" + } + }, + "node_modules/@floating-ui/dom": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.2.tgz", + "integrity": "sha512-7cfaOQuCS27HD7DX+6ib2OrnW+b4ZBwDNnCcT0uTyidcmyWb03FnQqJybDBoCnpdxwBSfA94UAYlRCt7mV+TbA==", + "dependencies": { + "@floating-ui/core": "^1.7.2", + "@floating-ui/utils": "^0.2.10" + } + }, + "node_modules/@floating-ui/react-dom": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.4.tgz", + "integrity": "sha512-JbbpPhp38UmXDDAu60RJmbeme37Jbgsm7NrHGgzYYFKmblzRUh6Pa641dII6LsjwF4XlScDrde2UAzDo/b9KPw==", + "dependencies": { + "@floating-ui/dom": "^1.7.2" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@floating-ui/utils": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.10.tgz", + "integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==" + }, + "node_modules/@hookform/resolvers": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@hookform/resolvers/-/resolvers-5.1.1.tgz", + "integrity": "sha512-J/NVING3LMAEvexJkyTLjruSm7aOFx7QX21pzkiJfMoNG0wl5aFEjLTl7ay7IQb9EWY6AkrBy7tHL2Alijpdcg==", + "dependencies": { + "@standard-schema/utils": "^0.3.0" + }, + "peerDependencies": { + "react-hook-form": "^7.55.0" + } + }, + "node_modules/@humanfs/core": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "dev": true, + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node": { + "version": "0.16.6", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz", + "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==", + 
"dev": true, + "dependencies": { + "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.3.0" + }, + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", + "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", + "dev": true, + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/retry": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "dev": true, + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@iconify/types": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@iconify/types/-/types-2.0.0.tgz", + "integrity": "sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==", + "license": "MIT" + }, + "node_modules/@iconify/utils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@iconify/utils/-/utils-3.1.0.tgz", + "integrity": "sha512-Zlzem1ZXhI1iHeeERabLNzBHdOa4VhQbqAcOQaMKuTuyZCpwKbC2R4Dd0Zo3g9EAc+Y4fiarO8HIHRAth7+skw==", + "license": "MIT", + "dependencies": { + 
"@antfu/install-pkg": "^1.1.0", + "@iconify/types": "^2.0.0", + "mlly": "^1.8.0" + } + }, + "node_modules/@img/sharp-darwin-arm64": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.34.3.tgz", + "integrity": "sha512-ryFMfvxxpQRsgZJqBd4wsttYQbCxsJksrv9Lw/v798JcQ8+w84mBWuXwl+TT0WJ/WrYOLaYpwQXi3sA9nTIaIg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-darwin-arm64": "1.2.0" + } + }, + "node_modules/@img/sharp-darwin-x64": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.34.3.tgz", + "integrity": "sha512-yHpJYynROAj12TA6qil58hmPmAwxKKC7reUqtGLzsOHfP7/rniNGTL8tjWX6L3CTV4+5P4ypcS7Pp+7OB+8ihA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-darwin-x64": "1.2.0" + } + }, + "node_modules/@img/sharp-libvips-darwin-arm64": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.2.0.tgz", + "integrity": "sha512-sBZmpwmxqwlqG9ueWFXtockhsxefaV6O84BMOrhtg/YqbTaRdqDE7hxraVE3y6gVM4eExmfzW4a8el9ArLeEiQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-darwin-x64": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.2.0.tgz", + "integrity": "sha512-M64XVuL94OgiNHa5/m2YvEQI5q2cl9d/wk0qFTDVXcYzi43lxuiFTftMR1tOnFQovVXNZJ5TURSDK2pNe9Yzqg==", + "cpu": [ + "x64" + ], + "optional": true, + 
"os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-arm": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.2.0.tgz", + "integrity": "sha512-mWd2uWvDtL/nvIzThLq3fr2nnGfyr/XMXlq8ZJ9WMR6PXijHlC3ksp0IpuhK6bougvQrchUAfzRLnbsen0Cqvw==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-arm64": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.2.0.tgz", + "integrity": "sha512-RXwd0CgG+uPRX5YYrkzKyalt2OJYRiJQ8ED/fi1tq9WQW2jsQIn0tqrlR5l5dr/rjqq6AHAxURhj2DVjyQWSOA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-ppc64": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-ppc64/-/sharp-libvips-linux-ppc64-1.2.0.tgz", + "integrity": "sha512-Xod/7KaDDHkYu2phxxfeEPXfVXFKx70EAFZ0qyUdOjCcxbjqyJOEUpDe6RIyaunGxT34Anf9ue/wuWOqBW2WcQ==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-s390x": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.2.0.tgz", + "integrity": "sha512-eMKfzDxLGT8mnmPJTNMcjfO33fLiTDsrMlUVcp6b96ETbnJmd4uvZxVJSKPQfS+odwfVaGifhsB07J1LynFehw==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-x64": { + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.2.0.tgz", + "integrity": "sha512-ZW3FPWIc7K1sH9E3nxIGB3y3dZkpJlMnkk7z5tu1nSkBoCgw2nSRTFHI5pB/3CQaJM0pdzMF3paf9ckKMSE9Tg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linuxmusl-arm64": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.2.0.tgz", + "integrity": "sha512-UG+LqQJbf5VJ8NWJ5Z3tdIe/HXjuIdo4JeVNADXBFuG7z9zjoegpzzGIyV5zQKi4zaJjnAd2+g2nna8TZvuW9Q==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linuxmusl-x64": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.2.0.tgz", + "integrity": "sha512-SRYOLR7CXPgNze8akZwjoGBoN1ThNZoqpOgfnOxmWsklTGVfJiGJoC/Lod7aNMGA1jSsKWM1+HRX43OP6p9+6Q==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-linux-arm": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.34.3.tgz", + "integrity": "sha512-oBK9l+h6KBN0i3dC8rYntLiVfW8D8wH+NPNT3O/WBHeW0OQWCjfWksLUaPidsrDKpJgXp3G3/hkmhptAW0I3+A==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm": "1.2.0" + } + }, + "node_modules/@img/sharp-linux-arm64": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.34.3.tgz", + "integrity": 
"sha512-QdrKe3EvQrqwkDrtuTIjI0bu6YEJHTgEeqdzI3uWJOH6G1O8Nl1iEeVYRGdj1h5I21CqxSvQp1Yv7xeU3ZewbA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm64": "1.2.0" + } + }, + "node_modules/@img/sharp-linux-ppc64": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-ppc64/-/sharp-linux-ppc64-0.34.3.tgz", + "integrity": "sha512-GLtbLQMCNC5nxuImPR2+RgrviwKwVql28FWZIW1zWruy6zLgA5/x2ZXk3mxj58X/tszVF69KK0Is83V8YgWhLA==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-ppc64": "1.2.0" + } + }, + "node_modules/@img/sharp-linux-s390x": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.34.3.tgz", + "integrity": "sha512-3gahT+A6c4cdc2edhsLHmIOXMb17ltffJlxR0aC2VPZfwKoTGZec6u5GrFgdR7ciJSsHT27BD3TIuGcuRT0KmQ==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-s390x": "1.2.0" + } + }, + "node_modules/@img/sharp-linux-x64": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.34.3.tgz", + "integrity": "sha512-8kYso8d806ypnSq3/Ly0QEw90V5ZoHh10yH0HnrzOCr6DKAPI6QVHvwleqMkVQ0m+fc7EH8ah0BB0QPuWY6zJQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { 
+ "@img/sharp-libvips-linux-x64": "1.2.0" + } + }, + "node_modules/@img/sharp-linuxmusl-arm64": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.34.3.tgz", + "integrity": "sha512-vAjbHDlr4izEiXM1OTggpCcPg9tn4YriK5vAjowJsHwdBIdx0fYRsURkxLG2RLm9gyBq66gwtWI8Gx0/ov+JKQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-arm64": "1.2.0" + } + }, + "node_modules/@img/sharp-linuxmusl-x64": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.34.3.tgz", + "integrity": "sha512-gCWUn9547K5bwvOn9l5XGAEjVTTRji4aPTqLzGXHvIr6bIDZKNTA34seMPgM0WmSf+RYBH411VavCejp3PkOeQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-x64": "1.2.0" + } + }, + "node_modules/@img/sharp-wasm32": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.34.3.tgz", + "integrity": "sha512-+CyRcpagHMGteySaWos8IbnXcHgfDn7pO2fiC2slJxvNq9gDipYBN42/RagzctVRKgxATmfqOSulgZv5e1RdMg==", + "cpu": [ + "wasm32" + ], + "optional": true, + "dependencies": { + "@emnapi/runtime": "^1.4.4" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-arm64": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-arm64/-/sharp-win32-arm64-0.34.3.tgz", + "integrity": "sha512-MjnHPnbqMXNC2UgeLJtX4XqoVHHlZNd+nPt1kRPmj63wURegwBhZlApELdtxM2OIZDRv/DFtLcNhVbd1z8GYXQ==", + "cpu": [ + "arm64" + ], + 
"optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-ia32": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.34.3.tgz", + "integrity": "sha512-xuCdhH44WxuXgOM714hn4amodJMZl3OEvf0GVTm0BEyMeA2to+8HEdRPShH0SLYptJY1uBw+SCFP9WVQi1Q/cw==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-x64": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.34.3.tgz", + "integrity": "sha512-OWwz05d++TxzLEv4VnsTz5CmZ6mI6S05sfQGEMrNrQcOEERbX46332IvE7pO/EUiw7jUrrS40z/M7kPyjfl04g==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@isaacs/fs-minipass": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", + "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", + "dev": true, + "dependencies": { + "minipass": "^7.0.4" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.12", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.12.tgz", + "integrity": "sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg==", + "dev": true, + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": 
"https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz", + "integrity": "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.29", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.29.tgz", + "integrity": "sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@mermaid-js/parser": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.6.3.tgz", + "integrity": "sha512-lnjOhe7zyHjc+If7yT4zoedx2vo4sHaTmtkl1+or8BRTnCtDmcTpAjpzDSfCZrshM5bCoz0GyidzadJAH1xobA==", + "license": "MIT", + "dependencies": { + "langium": "3.3.1" + } + }, + "node_modules/@monaco-editor/loader": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.5.0.tgz", + "integrity": "sha512-hKoGSM+7aAc7eRTRjpqAZucPmoNOC4UUbknb/VNoTkEIkCPhqV8LfbsgM1webRM7S/z21eHEx9Fkwx8Z/C/+Xw==", + "dependencies": { + "state-local": "^1.0.6" + } + }, + "node_modules/@monaco-editor/react": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@monaco-editor/react/-/react-4.7.0.tgz", + "integrity": "sha512-cyzXQCtO47ydzxpQtCGSQGOC8Gk3ZUeBXFAxD+CWXYFo5OqZyZUonFl0DwUlTyAfRHntBfw2p3w4s9R6oe1eCA==", + "dependencies": { + "@monaco-editor/loader": "^1.5.0" + }, + "peerDependencies": { + "monaco-editor": ">= 0.25.0 < 1", + "react": 
"^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.12", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.12.tgz", + "integrity": "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==", + "dev": true, + "optional": true, + "dependencies": { + "@emnapi/core": "^1.4.3", + "@emnapi/runtime": "^1.4.3", + "@tybys/wasm-util": "^0.10.0" + } + }, + "node_modules/@next/env": { + "version": "15.4.10", + "resolved": "https://registry.npmjs.org/@next/env/-/env-15.4.10.tgz", + "integrity": "sha512-knhmoJ0Vv7VRf6pZEPSnciUG1S4bIhWx+qTYBW/AjxEtlzsiNORPk8sFDCEvqLfmKuey56UB9FL1UdHEV3uBrg==", + "license": "MIT" + }, + "node_modules/@next/eslint-plugin-next": { + "version": "15.4.2", + "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-15.4.2.tgz", + "integrity": "sha512-k0rjdWjXBY6tAOty1ckrMETE6Mx66d85NsgcAIdDp7/cXOsTJ93ywmbg3uUcpxX5TUHFEcCWI5mb8nPhwCe9jg==", + "dev": true, + "dependencies": { + "fast-glob": "3.3.1" + } + }, + "node_modules/@next/swc-darwin-arm64": { + "version": "15.4.8", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-15.4.8.tgz", + "integrity": "sha512-Pf6zXp7yyQEn7sqMxur6+kYcywx5up1J849psyET7/8pG2gQTVMjU3NzgIt8SeEP5to3If/SaWmaA6H6ysBr1A==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-darwin-x64": { + "version": "15.4.8", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-15.4.8.tgz", + "integrity": "sha512-xla6AOfz68a6kq3gRQccWEvFC/VRGJmA/QuSLENSO7CZX5WIEkSz7r1FdXUjtGCQ1c2M+ndUAH7opdfLK1PQbw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + 
"node_modules/@next/swc-linux-arm64-gnu": { + "version": "15.4.8", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-15.4.8.tgz", + "integrity": "sha512-y3fmp+1Px/SJD+5ntve5QLZnGLycsxsVPkTzAc3zUiXYSOlTPqT8ynfmt6tt4fSo1tAhDPmryXpYKEAcoAPDJw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-musl": { + "version": "15.4.8", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-15.4.8.tgz", + "integrity": "sha512-DX/L8VHzrr1CfwaVjBQr3GWCqNNFgyWJbeQ10Lx/phzbQo3JNAxUok1DZ8JHRGcL6PgMRgj6HylnLNndxn4Z6A==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-gnu": { + "version": "15.4.8", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-15.4.8.tgz", + "integrity": "sha512-9fLAAXKAL3xEIFdKdzG5rUSvSiZTLLTCc6JKq1z04DR4zY7DbAPcRvNm3K1inVhTiQCs19ZRAgUerHiVKMZZIA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-musl": { + "version": "15.4.8", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-15.4.8.tgz", + "integrity": "sha512-s45V7nfb5g7dbS7JK6XZDcapicVrMMvX2uYgOHP16QuKH/JA285oy6HcxlKqwUNaFY/UC6EvQ8QZUOo19cBKSA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-arm64-msvc": { + "version": "15.4.8", + "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-15.4.8.tgz", + "integrity": "sha512-KjgeQyOAq7t/HzAJcWPGA8X+4WY03uSCZ2Ekk98S9OgCFsb6lfBE3dbUzUuEQAN2THbwYgFfxX2yFTCMm8Kehw==", + "cpu": [ + "arm64" + ], + "license": "MIT", 
+ "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-x64-msvc": { + "version": "15.4.8", + "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-15.4.8.tgz", + "integrity": "sha512-Exsmf/+42fWVnLMaZHzshukTBxZrSwuuLKFvqhGHJ+mC1AokqieLY/XzAl3jc/CqhXLqLY3RRjkKJ9YnLPcRWg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nolyfill/is-core-module": { + "version": "1.0.39", + "resolved": "https://registry.npmjs.org/@nolyfill/is-core-module/-/is-core-module-1.0.39.tgz", + "integrity": "sha512-nn5ozdjYQpUCZlWGuxcJY/KpxkWQs4DcbMCmKojjyrYDEAGy4Ce19NN4v5MduafTwJlbKc99UA8YhSVqq9yPZA==", + "dev": true, + "engines": { + "node": ">=12.4.0" + } + }, + "node_modules/@radix-ui/number": { + "version": "1.1.1", 
+ "resolved": "https://registry.npmjs.org/@radix-ui/number/-/number-1.1.1.tgz", + "integrity": "sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==" + }, + "node_modules/@radix-ui/primitive": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.2.tgz", + "integrity": "sha512-XnbHrrprsNqZKQhStrSwgRUQzoCI1glLzdw79xiZPoofhGICeZRSQ3dIxAKH1gb3OHfNf4d6f+vAv3kil2eggA==" + }, + "node_modules/@radix-ui/react-accordion": { + "version": "1.2.12", + "resolved": "https://registry.npmjs.org/@radix-ui/react-accordion/-/react-accordion-1.2.12.tgz", + "integrity": "sha512-T4nygeh9YE9dLRPhAHSeOZi7HBXo+0kYIPJXayZfvWOWA0+n3dESrZbjfDPUABkUNym6Hd+f2IR113To8D2GPA==", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collapsible": "1.1.12", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/primitive": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", + "integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==" + }, + "node_modules/@radix-ui/react-alert-dialog": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/@radix-ui/react-alert-dialog/-/react-alert-dialog-1.1.14.tgz", + "integrity": 
"sha512-IOZfZ3nPvN6lXpJTBCunFQPRSvK8MDgSc1FB85xnIpUKOw9en0dJj8JmCAxV7BiZdtYlUpmrQjoTFkVYtdoWzQ==", + "dependencies": { + "@radix-ui/primitive": "1.1.2", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dialog": "1.1.14", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-alert-dialog/node_modules/@radix-ui/react-dialog": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.14.tgz", + "integrity": "sha512-+CpweKjqpzTmwRwcYECQcNYbI8V9VSQt0SNFKeEBLgfucbsLssU6Ppq7wUdNXEGb573bMjFhVjKVll8rmV6zMw==", + "dependencies": { + "@radix-ui/primitive": "1.1.2", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.10", + "@radix-ui/react-focus-guards": "1.1.2", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.4", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-arrow": { + "version": "1.1.7", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.7.tgz", + "integrity": "sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-checkbox": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-checkbox/-/react-checkbox-1.3.2.tgz", + "integrity": "sha512-yd+dI56KZqawxKZrJ31eENUwqc1QSqg4OZ15rybGjF2ZNwMO+wCyHzAVLRp9qoYJf7kYy0YpZ2b0JCzJ42HZpA==", + "dependencies": { + "@radix-ui/primitive": "1.1.2", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-presence": "1.1.4", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-use-size": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collapsible": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/@radix-ui/react-collapsible/-/react-collapsible-1.1.12.tgz", + "integrity": "sha512-Uu+mSh4agx2ib1uIGPP4/CKNULyajb3p92LsVXmH2EHVMTfZWpll88XJ0j4W0z3f8NK1eYl1+Mf/szHPmcHzyA==", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-id": 
"1.1.1", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/primitive": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", + "integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==" + }, + "node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-presence": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz", + "integrity": "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collection": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.7.tgz", + "integrity": "sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + 
"@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-compose-refs": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz", + "integrity": "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dialog": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.15.tgz", + "integrity": "sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": 
"1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/primitive": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", + "integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==" + }, + "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-dismissable-layer": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.11.tgz", + "integrity": "sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-escape-keydown": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-focus-guards": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.3.tgz", + "integrity": 
"sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-presence": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz", + "integrity": "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-direction": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.1.tgz", + "integrity": "sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dismissable-layer": { + "version": "1.1.10", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.10.tgz", + "integrity": "sha512-IM1zzRV4W3HtVgftdQiiOmA0AdJlCtMLe00FXaHwgt3rAnNsIyDqshvkIW3hj/iu5hu8ERP7KIYki6NkqDxAwQ==", + "dependencies": { + "@radix-ui/primitive": "1.1.2", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + 
"@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-escape-keydown": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dropdown-menu": { + "version": "2.1.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.1.15.tgz", + "integrity": "sha512-mIBnOjgwo9AH3FyKaSWoSu/dYj6VdhJ7frEPiGTeXCdUFHjl9h3mFh2wwhEtINOmYXWhdpf1rY2minFsmaNgVQ==", + "dependencies": { + "@radix-ui/primitive": "1.1.2", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-menu": "2.1.15", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-focus-guards": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.2.tgz", + "integrity": "sha512-fyjAACV62oPV925xFCrH8DR5xWhg9KYtJT4s3u54jxp+L/hbpTY2kIeEFFbFe+a/HCE94zGQMZLIpVTPVZDhaA==", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-focus-scope": { + "version": "1.1.7", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.1.7.tgz", + "integrity": "sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-id": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.1.tgz", + "integrity": "sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-label": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-label/-/react-label-2.1.7.tgz", + "integrity": "sha512-YT1GqPSL8kJn20djelMX7/cTRp/Y9w5IZHvfxQTVHrOqa2yMl7i/UfMqKRU5V7mEyKTrUVgJXhNQPVCG8PBLoQ==", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-menu": { + "version": "2.1.15", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.1.15.tgz", + "integrity": "sha512-tVlmA3Vb9n8SZSd+YSbuFR66l87Wiy4du+YE+0hzKQEANA+7cWKH1WgqcEX4pXqxUFQKrWQGHdvEfw00TjFiew==", + "dependencies": { + "@radix-ui/primitive": "1.1.2", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-dismissable-layer": "1.1.10", + "@radix-ui/react-focus-guards": "1.1.2", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.7", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.4", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.10", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popover": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.1.15.tgz", + "integrity": "sha512-kr0X2+6Yy/vJzLYJUPCZEc8SfQcf+1COFoAqauJm74umQhta9M7lNJHP7QQS3vkvcGLQUbWpMzwrXYwrYztHKA==", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + 
"@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popover/node_modules/@radix-ui/primitive": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", + "integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==" + }, + "node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-dismissable-layer": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.11.tgz", + "integrity": "sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-escape-keydown": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-focus-guards": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.3.tgz", + "integrity": 
"sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-popper": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.8.tgz", + "integrity": "sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==", + "dependencies": { + "@floating-ui/react-dom": "^2.0.0", + "@radix-ui/react-arrow": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-rect": "1.1.1", + "@radix-ui/react-use-size": "1.1.1", + "@radix-ui/rect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-presence": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz", + "integrity": "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + 
"peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popper": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.7.tgz", + "integrity": "sha512-IUFAccz1JyKcf/RjB552PlWwxjeCJB8/4KxT7EhBHOJM+mN7LdW+B3kacJXILm32xawcMMjb2i0cIZpo+f9kiQ==", + "dependencies": { + "@floating-ui/react-dom": "^2.0.0", + "@radix-ui/react-arrow": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-rect": "1.1.1", + "@radix-ui/react-use-size": "1.1.1", + "@radix-ui/rect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-portal": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.9.tgz", + "integrity": "sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-presence": { + "version": "1.1.4", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.4.tgz", + "integrity": "sha512-ueDqRbdc4/bkaQT3GIpLQssRlFgWaL/U2z/S31qRwwLWoxHLgry3SIfCwhxeQNbirEUXFa+lq3RL3oBYXtcmIA==", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-progress": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-progress/-/react-progress-1.1.7.tgz", + "integrity": "sha512-vPdg/tF6YC/ynuBIJlk1mm7Le0VgW6ub6J2UWnTQ7/D23KXcPI1qy+0vBkgKgd38RCMJavBXpB83HPNFMTb0Fg==", + "dependencies": { + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + 
"optional": true + } + } + }, + "node_modules/@radix-ui/react-radio-group": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-radio-group/-/react-radio-group-1.3.8.tgz", + "integrity": "sha512-VBKYIYImA5zsxACdisNQ3BjCBfmbGH3kQlnFVqlWU4tXwjy7cGX8ta80BcrO+WJXIn5iBylEH3K6ZTlee//lgQ==", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-use-size": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-radio-group/node_modules/@radix-ui/primitive": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", + "integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==" + }, + "node_modules/@radix-ui/react-radio-group/node_modules/@radix-ui/react-presence": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz", + "integrity": "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 
|| ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-radio-group/node_modules/@radix-ui/react-roving-focus": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.11.tgz", + "integrity": "sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-roving-focus": { + "version": "1.1.10", + "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.10.tgz", + "integrity": "sha512-dT9aOXUen9JSsxnMPv/0VqySQf5eDQ6LCk5Sw28kamz8wSOW2bJdlX2Bg5VUIIcV+6XlHpWTIuTPCf/UNIyq8Q==", + "dependencies": { + "@radix-ui/primitive": "1.1.2", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 
|| ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-scroll-area": { + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/@radix-ui/react-scroll-area/-/react-scroll-area-1.2.9.tgz", + "integrity": "sha512-YSjEfBXnhUELsO2VzjdtYYD4CfQjvao+lhhrX5XsHD7/cyUNzljF1FHEbgTPN7LH2MClfwRMIsYlqTYpKTTe2A==", + "dependencies": { + "@radix-ui/number": "1.1.1", + "@radix-ui/primitive": "1.1.2", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-presence": "1.1.4", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-select": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-select/-/react-select-2.2.5.tgz", + "integrity": "sha512-HnMTdXEVuuyzx63ME0ut4+sEMYW6oouHWNGUZc7ddvUWIcfCva/AMoqEW/3wnEllriMWBa0RHspCYnfCWJQYmA==", + "dependencies": { + "@radix-ui/number": "1.1.1", + "@radix-ui/primitive": "1.1.2", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-dismissable-layer": "1.1.10", + "@radix-ui/react-focus-guards": "1.1.2", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.7", + "@radix-ui/react-portal": "1.1.9", + 
"@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-visually-hidden": "1.2.3", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-separator": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-separator/-/react-separator-1.1.7.tgz", + "integrity": "sha512-0HEb8R9E8A+jZjvmFCy/J4xhbXy3TV+9XSnGJ3KvTtjlIUy/YQ/p6UYZvi7YbeoeXdyU9+Y3scizK6hkY37baA==", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-slot": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", + "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tabs": { + "version": "1.1.12", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-tabs/-/react-tabs-1.1.12.tgz", + "integrity": "sha512-GTVAlRVrQrSw3cEARM0nAx73ixrWDPNZAruETn3oHCNP6SbZ/hNxdxp+u7VkIEv3/sFoLq1PfcHrl7Pnp0CDpw==", + "dependencies": { + "@radix-ui/primitive": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-presence": "1.1.4", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.10", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tooltip": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.2.7.tgz", + "integrity": "sha512-Ap+fNYwKTYJ9pzqW+Xe2HtMRbQ/EeWkj2qykZ6SuEV4iS/o1bZI5ssJbk4D2r8XuDuOBVz/tIx2JObtuqU+5Zw==", + "dependencies": { + "@radix-ui/primitive": "1.1.2", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.10", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.7", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.4", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-visually-hidden": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + 
"node_modules/@radix-ui/react-use-callback-ref": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.1.tgz", + "integrity": "sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-controllable-state": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz", + "integrity": "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==", + "dependencies": { + "@radix-ui/react-use-effect-event": "0.0.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-effect-event": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-effect-event/-/react-use-effect-event-0.0.2.tgz", + "integrity": "sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA==", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-escape-keydown": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.1.tgz", + "integrity": "sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==", + 
"dependencies": { + "@radix-ui/react-use-callback-ref": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-layout-effect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.1.tgz", + "integrity": "sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-previous": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-previous/-/react-use-previous-1.1.1.tgz", + "integrity": "sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ==", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-rect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.1.1.tgz", + "integrity": "sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w==", + "dependencies": { + "@radix-ui/rect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-size": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.1.1.tgz", + "integrity": 
"sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ==", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-visually-hidden": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.2.3.tgz", + "integrity": "sha512-pzJq12tEaaIhqjbzpCuv/OypJY/BPavOofm+dbab+MHLajy277+1lLm6JFcGgF5eskJ6mquGirhXY2GD/8u8Ug==", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/rect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.1.tgz", + "integrity": "sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw==" + }, + "node_modules/@rtsao/scc": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz", + "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", + "dev": true + }, + "node_modules/@rushstack/eslint-patch": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.12.0.tgz", + "integrity": "sha512-5EwMtOqvJMMa3HbmxLlF74e+3/HhwBTMcvt3nqVJgGCozO6hzIPOBlwm8mGVNR9SN2IJpxSnlxczyDjcn7qIyw==", + "dev": true + }, + "node_modules/@standard-schema/utils": { + "version": "0.3.0", + "resolved": 
"https://registry.npmjs.org/@standard-schema/utils/-/utils-0.3.0.tgz", + "integrity": "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==" + }, + "node_modules/@swc/helpers": { + "version": "0.5.15", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.15.tgz", + "integrity": "sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==", + "dependencies": { + "tslib": "^2.8.0" + } + }, + "node_modules/@tailwindcss/node": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.11.tgz", + "integrity": "sha512-yzhzuGRmv5QyU9qLNg4GTlYI6STedBWRE7NjxP45CsFYYq9taI0zJXZBMqIC/c8fViNLhmrbpSFS57EoxUmD6Q==", + "dev": true, + "dependencies": { + "@ampproject/remapping": "^2.3.0", + "enhanced-resolve": "^5.18.1", + "jiti": "^2.4.2", + "lightningcss": "1.30.1", + "magic-string": "^0.30.17", + "source-map-js": "^1.2.1", + "tailwindcss": "4.1.11" + } + }, + "node_modules/@tailwindcss/oxide": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.11.tgz", + "integrity": "sha512-Q69XzrtAhuyfHo+5/HMgr1lAiPP/G40OMFAnws7xcFEYqcypZmdW8eGXaOUIeOl1dzPJBPENXgbjsOyhg2nkrg==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "detect-libc": "^2.0.4", + "tar": "^7.4.3" + }, + "engines": { + "node": ">= 10" + }, + "optionalDependencies": { + "@tailwindcss/oxide-android-arm64": "4.1.11", + "@tailwindcss/oxide-darwin-arm64": "4.1.11", + "@tailwindcss/oxide-darwin-x64": "4.1.11", + "@tailwindcss/oxide-freebsd-x64": "4.1.11", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.11", + "@tailwindcss/oxide-linux-arm64-gnu": "4.1.11", + "@tailwindcss/oxide-linux-arm64-musl": "4.1.11", + "@tailwindcss/oxide-linux-x64-gnu": "4.1.11", + "@tailwindcss/oxide-linux-x64-musl": "4.1.11", + "@tailwindcss/oxide-wasm32-wasi": "4.1.11", + "@tailwindcss/oxide-win32-arm64-msvc": "4.1.11", + "@tailwindcss/oxide-win32-x64-msvc": 
"4.1.11" + } + }, + "node_modules/@tailwindcss/oxide-android-arm64": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.11.tgz", + "integrity": "sha512-3IfFuATVRUMZZprEIx9OGDjG3Ou3jG4xQzNTvjDoKmU9JdmoCohQJ83MYd0GPnQIu89YoJqvMM0G3uqLRFtetg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-arm64": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.11.tgz", + "integrity": "sha512-ESgStEOEsyg8J5YcMb1xl8WFOXfeBmrhAwGsFxxB2CxY9evy63+AtpbDLAyRkJnxLy2WsD1qF13E97uQyP1lfQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-x64": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.11.tgz", + "integrity": "sha512-EgnK8kRchgmgzG6jE10UQNaH9Mwi2n+yw1jWmof9Vyg2lpKNX2ioe7CJdf9M5f8V9uaQxInenZkOxnTVL3fhAw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-freebsd-x64": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.11.tgz", + "integrity": "sha512-xdqKtbpHs7pQhIKmqVpxStnY1skuNh4CtbcyOHeX1YBE0hArj2romsFGb6yUmzkq/6M24nkxDqU8GYrKrz+UcA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.11.tgz", + "integrity": 
"sha512-ryHQK2eyDYYMwB5wZL46uoxz2zzDZsFBwfjssgB7pzytAeCCa6glsiJGjhTEddq/4OsIjsLNMAiMlHNYnkEEeg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.11.tgz", + "integrity": "sha512-mYwqheq4BXF83j/w75ewkPJmPZIqqP1nhoghS9D57CLjsh3Nfq0m4ftTotRYtGnZd3eCztgbSPJ9QhfC91gDZQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-musl": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.11.tgz", + "integrity": "sha512-m/NVRFNGlEHJrNVk3O6I9ggVuNjXHIPoD6bqay/pubtYC9QIdAMpS+cswZQPBLvVvEF6GtSNONbDkZrjWZXYNQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-gnu": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.11.tgz", + "integrity": "sha512-YW6sblI7xukSD2TdbbaeQVDysIm/UPJtObHJHKxDEcW2exAtY47j52f8jZXkqE1krdnkhCMGqP3dbniu1Te2Fg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-musl": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.11.tgz", + "integrity": "sha512-e3C/RRhGunWYNC3aSF7exsQkdXzQ/M+aYuZHKnw4U7KQwTJotnWsGOIVih0s2qQzmEzOFIJ3+xt7iq67K/p56Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi": 
{ + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.11.tgz", + "integrity": "sha512-Xo1+/GU0JEN/C/dvcammKHzeM6NqKovG+6921MR6oadee5XPBaKOumrJCXvopJ/Qb5TH7LX/UAywbqrP4lax0g==", + "bundleDependencies": [ + "@napi-rs/wasm-runtime", + "@emnapi/core", + "@emnapi/runtime", + "@tybys/wasm-util", + "@emnapi/wasi-threads", + "tslib" + ], + "cpu": [ + "wasm32" + ], + "dev": true, + "optional": true, + "dependencies": { + "@emnapi/core": "^1.4.3", + "@emnapi/runtime": "^1.4.3", + "@emnapi/wasi-threads": "^1.0.2", + "@napi-rs/wasm-runtime": "^0.2.11", + "@tybys/wasm-util": "^0.9.0", + "tslib": "^2.8.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/core": { + "version": "1.4.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.0.2", + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/runtime": { + "version": "1.4.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/wasi-threads": { + "version": "1.0.2", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.11", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.4.3", + "@emnapi/runtime": "^1.4.3", + "@tybys/wasm-util": "^0.9.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@tybys/wasm-util": { + "version": "0.9.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + 
"node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/tslib": { + "version": "2.8.0", + "dev": true, + "inBundle": true, + "license": "0BSD", + "optional": true + }, + "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.11.tgz", + "integrity": "sha512-UgKYx5PwEKrac3GPNPf6HVMNhUIGuUh4wlDFR2jYYdkX6pL/rn73zTq/4pzUm8fOjAn5L8zDeHp9iXmUGOXZ+w==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-win32-x64-msvc": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.11.tgz", + "integrity": "sha512-YfHoggn1j0LK7wR82TOucWc5LDCguHnoS879idHekmmiR7g9HUtMw9MI0NHatS28u/Xlkfi9w5RJWgz2Dl+5Qg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/postcss": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/postcss/-/postcss-4.1.11.tgz", + "integrity": "sha512-q/EAIIpF6WpLhKEuQSEVMZNMIY8KhWoAemZ9eylNAih9jxMGAYPPWBn3I9QL/2jZ+e7OEz/tZkX5HwbBR4HohA==", + "dev": true, + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "@tailwindcss/node": "4.1.11", + "@tailwindcss/oxide": "4.1.11", + "postcss": "^8.4.41", + "tailwindcss": "4.1.11" + } + }, + "node_modules/@tailwindcss/typography": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.16.tgz", + "integrity": "sha512-0wDLwCVF5V3x3b1SGXPCDcdsbDHMBe+lkFzBRaHeLvNi+nrrnZ1lA18u+OTWO8iSWU2GxUOCvlXtDuqftc1oiA==", + "dependencies": { + "lodash.castarray": "^4.4.0", + "lodash.isplainobject": "^4.0.6", + "lodash.merge": "^4.6.2", + "postcss-selector-parser": "6.0.10" + }, + "peerDependencies": { + "tailwindcss": ">=3.0.0 || insiders || 
>=4.0.0-alpha.20 || >=4.0.0-beta.1" + } + }, + "node_modules/@tanstack/query-core": { + "version": "5.83.0", + "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.83.0.tgz", + "integrity": "sha512-0M8dA+amXUkyz5cVUm/B+zSk3xkQAcuXuz5/Q/LveT4ots2rBpPTZOzd7yJa2Utsf8D2Upl5KyjhHRY+9lB/XA==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/react-query": { + "version": "5.83.0", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.83.0.tgz", + "integrity": "sha512-/XGYhZ3foc5H0VM2jLSD/NyBRIOK4q9kfeml4+0x2DlL6xVuAcVEW+hTlTapAmejObg0i3eNqhkr2dT+eciwoQ==", + "dependencies": { + "@tanstack/query-core": "5.83.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "react": "^18 || ^19" + } + }, + "node_modules/@tweenjs/tween.js": { + "version": "25.0.0", + "resolved": "https://registry.npmjs.org/@tweenjs/tween.js/-/tween.js-25.0.0.tgz", + "integrity": "sha512-XKLA6syeBUaPzx4j3qwMqzzq+V4uo72BnlbOjmuljLrRqdsd3qnzvZZoxvMHZ23ndsRS4aufU6JOZYpCbU6T1A==", + "license": "MIT" + }, + "node_modules/@tybys/wasm-util": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.0.tgz", + "integrity": "sha512-VyyPYFlOMNylG45GoAe0xDoLwWuowvf92F9kySqzYh8vmYm7D2u4iUJKa1tOUpS70Ku13ASrOkS4ScXFsTaCNQ==", + "dev": true, + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@types/d3": { + "version": "7.4.3", + "resolved": "https://registry.npmjs.org/@types/d3/-/d3-7.4.3.tgz", + "integrity": "sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==", + "license": "MIT", + "dependencies": { + "@types/d3-array": "*", + "@types/d3-axis": "*", + "@types/d3-brush": "*", + "@types/d3-chord": "*", + "@types/d3-color": "*", + "@types/d3-contour": "*", + "@types/d3-delaunay": "*", + "@types/d3-dispatch": 
"*", + "@types/d3-drag": "*", + "@types/d3-dsv": "*", + "@types/d3-ease": "*", + "@types/d3-fetch": "*", + "@types/d3-force": "*", + "@types/d3-format": "*", + "@types/d3-geo": "*", + "@types/d3-hierarchy": "*", + "@types/d3-interpolate": "*", + "@types/d3-path": "*", + "@types/d3-polygon": "*", + "@types/d3-quadtree": "*", + "@types/d3-random": "*", + "@types/d3-scale": "*", + "@types/d3-scale-chromatic": "*", + "@types/d3-selection": "*", + "@types/d3-shape": "*", + "@types/d3-time": "*", + "@types/d3-time-format": "*", + "@types/d3-timer": "*", + "@types/d3-transition": "*", + "@types/d3-zoom": "*" + } + }, + "node_modules/@types/d3-array": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz", + "integrity": "sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==", + "license": "MIT" + }, + "node_modules/@types/d3-axis": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-3.0.6.tgz", + "integrity": "sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==", + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-brush": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-3.0.6.tgz", + "integrity": "sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==", + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-chord": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-chord/-/d3-chord-3.0.6.tgz", + "integrity": "sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==", + "license": "MIT" + }, + "node_modules/@types/d3-color": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", + "integrity": 
"sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", + "license": "MIT" + }, + "node_modules/@types/d3-contour": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-3.0.6.tgz", + "integrity": "sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==", + "license": "MIT", + "dependencies": { + "@types/d3-array": "*", + "@types/geojson": "*" + } + }, + "node_modules/@types/d3-delaunay": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-delaunay/-/d3-delaunay-6.0.4.tgz", + "integrity": "sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==", + "license": "MIT" + }, + "node_modules/@types/d3-dispatch": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.7.tgz", + "integrity": "sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA==", + "license": "MIT" + }, + "node_modules/@types/d3-drag": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.7.tgz", + "integrity": "sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==", + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-dsv": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-3.0.7.tgz", + "integrity": "sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==", + "license": "MIT" + }, + "node_modules/@types/d3-ease": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz", + "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==", + "license": "MIT" + }, + "node_modules/@types/d3-fetch": { + "version": "3.0.7", + "resolved": 
"https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-3.0.7.tgz", + "integrity": "sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==", + "license": "MIT", + "dependencies": { + "@types/d3-dsv": "*" + } + }, + "node_modules/@types/d3-force": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-3.0.10.tgz", + "integrity": "sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==", + "license": "MIT" + }, + "node_modules/@types/d3-format": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-3.0.4.tgz", + "integrity": "sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==", + "license": "MIT" + }, + "node_modules/@types/d3-geo": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-3.1.0.tgz", + "integrity": "sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==", + "license": "MIT", + "dependencies": { + "@types/geojson": "*" + } + }, + "node_modules/@types/d3-hierarchy": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.7.tgz", + "integrity": "sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==", + "license": "MIT" + }, + "node_modules/@types/d3-interpolate": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", + "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", + "license": "MIT", + "dependencies": { + "@types/d3-color": "*" + } + }, + "node_modules/@types/d3-path": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz", + "integrity": 
"sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==", + "license": "MIT" + }, + "node_modules/@types/d3-polygon": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-polygon/-/d3-polygon-3.0.2.tgz", + "integrity": "sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==", + "license": "MIT" + }, + "node_modules/@types/d3-quadtree": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-3.0.6.tgz", + "integrity": "sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==", + "license": "MIT" + }, + "node_modules/@types/d3-random": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-random/-/d3-random-3.0.3.tgz", + "integrity": "sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==", + "license": "MIT" + }, + "node_modules/@types/d3-scale": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz", + "integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==", + "license": "MIT", + "dependencies": { + "@types/d3-time": "*" + } + }, + "node_modules/@types/d3-scale-chromatic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", + "integrity": "sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==", + "license": "MIT" + }, + "node_modules/@types/d3-selection": { + "version": "3.0.11", + "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.11.tgz", + "integrity": "sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==", + "license": "MIT" + }, + "node_modules/@types/d3-shape": { + "version": "3.1.8", + "resolved": 
"https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.8.tgz", + "integrity": "sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==", + "license": "MIT", + "dependencies": { + "@types/d3-path": "*" + } + }, + "node_modules/@types/d3-time": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz", + "integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==", + "license": "MIT" + }, + "node_modules/@types/d3-time-format": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-4.0.3.tgz", + "integrity": "sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==", + "license": "MIT" + }, + "node_modules/@types/d3-timer": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz", + "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", + "license": "MIT" + }, + "node_modules/@types/d3-transition": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.9.tgz", + "integrity": "sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==", + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-zoom": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.8.tgz", + "integrity": "sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==", + "license": "MIT", + "dependencies": { + "@types/d3-interpolate": "*", + "@types/d3-selection": "*" + } + }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": 
"sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==" + }, + "node_modules/@types/estree-jsx": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz", + "integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/@types/geojson": { + "version": "7946.0.16", + "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", + "integrity": "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==", + "license": "MIT" + }, + "node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true + }, + "node_modules/@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", + "dev": true + }, + "node_modules/@types/mdast": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", + "integrity": 
"sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==" + }, + "node_modules/@types/node": { + "version": "20.19.9", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.9.tgz", + "integrity": "sha512-cuVNgarYWZqxRJDQHEB58GEONhOK79QVR/qYx4S7kcUObQvUwvFnYxJuuHUKm2aieN9X3yZB4LZsuYNU1Qphsw==", + "dev": true, + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@types/prismjs": { + "version": "1.26.5", + "resolved": "https://registry.npmjs.org/@types/prismjs/-/prismjs-1.26.5.tgz", + "integrity": "sha512-AUZTa7hQ2KY5L7AmtSiqxlhWxb4ina0yd8hNbl4TWuqnv/pFP0nDMb3YrfSBf4hJVGLh2YEIBfKaBW/9UEl6IQ==" + }, + "node_modules/@types/react": { + "version": "19.1.8", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.1.8.tgz", + "integrity": "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g==", + "dependencies": { + "csstype": "^3.0.2" + } + }, + "node_modules/@types/react-dom": { + "version": "19.1.6", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.1.6.tgz", + "integrity": "sha512-4hOiT/dwO8Ko0gV1m/TJZYk3y0KBnY9vzDh7W+DH17b2HFSOGgdj33dhihPeuy3l0q23+4e+hoXHV6hCC4dCXw==", + "devOptional": true, + "peerDependencies": { + "@types/react": "^19.0.0" + } + }, + "node_modules/@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "license": "MIT", + "optional": true + }, + "node_modules/@types/unist": { + "version": "3.0.3", + "resolved": 
"https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==" + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.37.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.37.0.tgz", + "integrity": "sha512-jsuVWeIkb6ggzB+wPCsR4e6loj+rM72ohW6IBn2C+5NCvfUVY8s33iFPySSVXqtm5Hu29Ne/9bnA0JmyLmgenA==", + "dev": true, + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.37.0", + "@typescript-eslint/type-utils": "8.37.0", + "@typescript-eslint/utils": "8.37.0", + "@typescript-eslint/visitor-keys": "8.37.0", + "graphemer": "^1.4.0", + "ignore": "^7.0.0", + "natural-compare": "^1.4.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.37.0", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.37.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.37.0.tgz", + "integrity": "sha512-kVIaQE9vrN9RLCQMQ3iyRlVJpTiDUY6woHGb30JDkfJErqrQEmtdWH3gV0PBAfGZgQXoqzXOO0T3K6ioApbbAA==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "8.37.0", + "@typescript-eslint/types": "8.37.0", + "@typescript-eslint/typescript-estree": "8.37.0", + "@typescript-eslint/visitor-keys": "8.37.0", + "debug": "^4.3.4" 
+ }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/project-service": { + "version": "8.37.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.37.0.tgz", + "integrity": "sha512-BIUXYsbkl5A1aJDdYJCBAo8rCEbAvdquQ8AnLb6z5Lp1u3x5PNgSSx9A/zqYc++Xnr/0DVpls8iQ2cJs/izTXA==", + "dev": true, + "dependencies": { + "@typescript-eslint/tsconfig-utils": "^8.37.0", + "@typescript-eslint/types": "^8.37.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.37.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.37.0.tgz", + "integrity": "sha512-0vGq0yiU1gbjKob2q691ybTg9JX6ShiVXAAfm2jGf3q0hdP6/BruaFjL/ManAR/lj05AvYCH+5bbVo0VtzmjOA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.37.0", + "@typescript-eslint/visitor-keys": "8.37.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.37.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.37.0.tgz", + "integrity": "sha512-1/YHvAVTimMM9mmlPvTec9NP4bobA1RkDbMydxG8omqwJJLEW/Iy2C4adsAESIXU3WGLXFHSZUU+C9EoFWl4Zg==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "8.37.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.37.0.tgz", + "integrity": "sha512-SPkXWIkVZxhgwSwVq9rqj/4VFo7MnWwVaRNznfQDc/xPYHjXnPfLWn+4L6FF1cAz6e7dsqBeMawgl7QjUMj4Ow==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.37.0", + "@typescript-eslint/typescript-estree": "8.37.0", + "@typescript-eslint/utils": "8.37.0", + "debug": "^4.3.4", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "8.37.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.37.0.tgz", + "integrity": "sha512-ax0nv7PUF9NOVPs+lmQ7yIE7IQmAf8LGcXbMvHX5Gm+YJUYNAl340XkGnrimxZ0elXyoQJuN5sbg6C4evKA4SQ==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.37.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.37.0.tgz", + "integrity": "sha512-zuWDMDuzMRbQOM+bHyU4/slw27bAUEcKSKKs3hcv2aNnc/tvE/h7w60dwVw8vnal2Pub6RT1T7BI8tFZ1fE+yg==", + "dev": true, + "dependencies": { + "@typescript-eslint/project-service": "8.37.0", + "@typescript-eslint/tsconfig-utils": "8.37.0", + "@typescript-eslint/types": "8.37.0", + "@typescript-eslint/visitor-keys": "8.37.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + 
"ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": 
"https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "8.37.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.37.0.tgz", + "integrity": "sha512-TSFvkIW6gGjN2p6zbXo20FzCABbyUAuq6tBvNRGsKdsSQ6a7rnV6ADfZ7f4iI3lIiXc4F4WWvtUfDw9CJ9pO5A==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/scope-manager": "8.37.0", + "@typescript-eslint/types": "8.37.0", + "@typescript-eslint/typescript-estree": "8.37.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.37.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.37.0.tgz", + "integrity": "sha512-YzfhzcTnZVPiLfP/oeKtDp2evwvHLMe0LOy7oe+hb9KKIumLNohYS9Hgp1ifwpu42YWxhZE8yieggz6JpqO/1w==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.37.0", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@uiw/copy-to-clipboard": { + "version": "1.0.17", + "resolved": "https://registry.npmjs.org/@uiw/copy-to-clipboard/-/copy-to-clipboard-1.0.17.tgz", + "integrity": "sha512-O2GUHV90Iw2VrSLVLK0OmNIMdZ5fgEg4NhvtwINsX+eZ/Wf6DWD0TdsK9xwV7dNRnK/UI2mQtl0a2/kRgm1m1A==", + "funding": { + "url": "https://jaywcjlove.github.io/#/sponsor" + } + }, + "node_modules/@uiw/react-markdown-preview": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/@uiw/react-markdown-preview/-/react-markdown-preview-5.1.5.tgz", + "integrity": 
"sha512-DNOqx1a6gJR7Btt57zpGEKTfHRlb7rWbtctMRO2f82wWcuoJsxPBrM+JWebDdOD0LfD8oe2CQvW2ICQJKHQhZg==", + "dependencies": { + "@babel/runtime": "^7.17.2", + "@uiw/copy-to-clipboard": "~1.0.12", + "react-markdown": "~9.0.1", + "rehype-attr": "~3.0.1", + "rehype-autolink-headings": "~7.1.0", + "rehype-ignore": "^2.0.0", + "rehype-prism-plus": "2.0.0", + "rehype-raw": "^7.0.0", + "rehype-rewrite": "~4.0.0", + "rehype-slug": "~6.0.0", + "remark-gfm": "~4.0.0", + "remark-github-blockquote-alert": "^1.0.0", + "unist-util-visit": "^5.0.0" + }, + "funding": { + "url": "https://jaywcjlove.github.io/#/sponsor" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@uiw/react-markdown-preview/node_modules/react-markdown": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-9.0.3.tgz", + "integrity": "sha512-Yk7Z94dbgYTOrdk41Z74GoKA7rThnsbbqBTRYuxoe08qvfQ9tJVhmAKw6BJS/ZORG7kTy/s1QvYzSuaoBA1qfw==", + "dependencies": { + "@types/hast": "^3.0.0", + "devlop": "^1.0.0", + "hast-util-to-jsx-runtime": "^2.0.0", + "html-url-attributes": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "remark-parse": "^11.0.0", + "remark-rehype": "^11.0.0", + "unified": "^11.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "@types/react": ">=18", + "react": ">=18" + } + }, + "node_modules/@uiw/react-markdown-preview/node_modules/rehype-prism-plus": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/rehype-prism-plus/-/rehype-prism-plus-2.0.0.tgz", + "integrity": "sha512-FeM/9V2N7EvDZVdR2dqhAzlw5YI49m9Tgn7ZrYJeYHIahM6gcXpH0K1y2gNnKanZCydOMluJvX2cB9z3lhY8XQ==", + "dependencies": { + "hast-util-to-string": "^3.0.0", + "parse-numeric-range": "^1.3.0", + "refractor": "^4.8.0", + "rehype-parse": "^9.0.0", + "unist-util-filter": "^5.0.0", + "unist-util-visit": "^5.0.0" + } + 
}, + "node_modules/@uiw/react-md-editor": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/@uiw/react-md-editor/-/react-md-editor-4.0.8.tgz", + "integrity": "sha512-S3mOzZeGmJNhzdXJxRTCwsFMDp8nBWeQUf59cK3L6QHzDUHnRoHpcmWpfVRyKGKSg8zaI2+meU5cYWf8kYn3mQ==", + "dependencies": { + "@babel/runtime": "^7.14.6", + "@uiw/react-markdown-preview": "^5.0.6", + "rehype": "~13.0.0", + "rehype-prism-plus": "~2.0.0" + }, + "funding": { + "url": "https://jaywcjlove.github.io/#/sponsor" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==" + }, + "node_modules/@unrs/resolver-binding-android-arm-eabi": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm-eabi/-/resolver-binding-android-arm-eabi-1.11.1.tgz", + "integrity": "sha512-ppLRUgHVaGRWUx0R0Ut06Mjo9gBaBkg3v/8AxusGLhsIotbBLuRk51rAzqLC8gq6NyyAojEXglNjzf6R948DNw==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@unrs/resolver-binding-android-arm64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm64/-/resolver-binding-android-arm64-1.11.1.tgz", + "integrity": "sha512-lCxkVtb4wp1v+EoN+HjIG9cIIzPkX5OtM03pQYkG+U5O/wL53LC4QbIeazgiKqluGeVEeBlZahHalCaBvU1a2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@unrs/resolver-binding-darwin-arm64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-darwin-arm64/-/resolver-binding-darwin-arm64-1.11.1.tgz", + "integrity": 
"sha512-gPVA1UjRu1Y/IsB/dQEsp2V1pm44Of6+LWvbLc9SDk1c2KhhDRDBUkQCYVWe6f26uJb3fOK8saWMgtX8IrMk3g==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-darwin-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-darwin-x64/-/resolver-binding-darwin-x64-1.11.1.tgz", + "integrity": "sha512-cFzP7rWKd3lZaCsDze07QX1SC24lO8mPty9vdP+YVa3MGdVgPmFc59317b2ioXtgCMKGiCLxJ4HQs62oz6GfRQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-freebsd-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-freebsd-x64/-/resolver-binding-freebsd-x64-1.11.1.tgz", + "integrity": "sha512-fqtGgak3zX4DCB6PFpsH5+Kmt/8CIi4Bry4rb1ho6Av2QHTREM+47y282Uqiu3ZRF5IQioJQ5qWRV6jduA+iGw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm-gnueabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm-gnueabihf/-/resolver-binding-linux-arm-gnueabihf-1.11.1.tgz", + "integrity": "sha512-u92mvlcYtp9MRKmP+ZvMmtPN34+/3lMHlyMj7wXJDeXxuM0Vgzz0+PPJNsro1m3IZPYChIkn944wW8TYgGKFHw==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm-musleabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm-musleabihf/-/resolver-binding-linux-arm-musleabihf-1.11.1.tgz", + "integrity": "sha512-cINaoY2z7LVCrfHkIcmvj7osTOtm6VVT16b5oQdS4beibX2SYBwgYLmqhBjA1t51CarSaBuX5YNsWLjsqfW5Cw==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-gnu": { + "version": "1.11.1", + "resolved": 
"https://registry.npmjs.org/@unrs/resolver-binding-linux-arm64-gnu/-/resolver-binding-linux-arm64-gnu-1.11.1.tgz", + "integrity": "sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm64-musl/-/resolver-binding-linux-arm64-musl-1.11.1.tgz", + "integrity": "sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-ppc64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-ppc64-gnu/-/resolver-binding-linux-ppc64-gnu-1.11.1.tgz", + "integrity": "sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-riscv64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-riscv64-gnu/-/resolver-binding-linux-riscv64-gnu-1.11.1.tgz", + "integrity": "sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-riscv64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-riscv64-musl/-/resolver-binding-linux-riscv64-musl-1.11.1.tgz", + "integrity": "sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + 
}, + "node_modules/@unrs/resolver-binding-linux-s390x-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-s390x-gnu/-/resolver-binding-linux-s390x-gnu-1.11.1.tgz", + "integrity": "sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-x64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-x64-gnu/-/resolver-binding-linux-x64-gnu-1.11.1.tgz", + "integrity": "sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-x64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-x64-musl/-/resolver-binding-linux-x64-musl-1.11.1.tgz", + "integrity": "sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-wasm32-wasi": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-wasm32-wasi/-/resolver-binding-wasm32-wasi-1.11.1.tgz", + "integrity": "sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==", + "cpu": [ + "wasm32" + ], + "dev": true, + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^0.2.11" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@unrs/resolver-binding-win32-arm64-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-arm64-msvc/-/resolver-binding-win32-arm64-msvc-1.11.1.tgz", + "integrity": 
"sha512-nRcz5Il4ln0kMhfL8S3hLkxI85BXs3o8EYoattsJNdsX4YUU89iOkVn7g0VHSRxFuVMdM4Q1jEpIId1Ihim/Uw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/resolver-binding-win32-ia32-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-ia32-msvc/-/resolver-binding-win32-ia32-msvc-1.11.1.tgz", + "integrity": "sha512-DCEI6t5i1NmAZp6pFonpD5m7i6aFrpofcp4LA2i8IIq60Jyo28hamKBxNrZcyOwVOZkgsRp9O2sXWBWP8MnvIQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/resolver-binding-win32-x64-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-x64-msvc/-/resolver-binding-win32-x64-msvc-1.11.1.tgz", + "integrity": "sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/accessor-fn": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/accessor-fn/-/accessor-fn-1.5.3.tgz", + "integrity": "sha512-rkAofCwe/FvYFUlMB0v0gWmhqtfAtV1IUkdPbfhTUyYniu5LrC0A0UJkTH0Jv3S8SvwkmfuAlY+mQIJATdocMA==", + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + 
"version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/aria-hidden": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.6.tgz", + "integrity": "sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==", + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/aria-query": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.2.tgz", + "integrity": "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/array-buffer-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", + "integrity": 
"sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.3", + "is-array-buffer": "^3.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-includes": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.9.tgz", + "integrity": "sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.24.0", + "es-object-atoms": "^1.1.1", + "get-intrinsic": "^1.3.0", + "is-string": "^1.1.1", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.findlast": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz", + "integrity": "sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.findlastindex": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.6.tgz", + "integrity": "sha512-F/TKATkzseUExPlfvmwQKGITM3DGTK+vkAsCZoDc5daVygbJBnjEUCbgkAvVFsgfXfX4YIqZ/27G3k3tdXrTxQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + 
"es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "es-shim-unscopables": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flat": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz", + "integrity": "sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flatmap": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz", + "integrity": "sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.tosorted": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.4.tgz", + "integrity": "sha512-p6Fx8B7b7ZhL/gmUsAy0D15WhvDccw3mnGNbZpi3pmeJdxtWsj2jEaI4Y6oo3XiHfzuSgPwKc04MYt6KgvC/wA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.3", + "es-errors": "^1.3.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", + 
"integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", + "dev": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ast-types-flow": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.8.tgz", + "integrity": "sha512-OH/2E5Fg20h2aPrbe+QL8JZQFko0YZaF+j4mnQ7BGhfavO7OpSLa8a0y9sBwomHdSbkhTS8TQNayBfnW5DwbvQ==", + "dev": true + }, + "node_modules/async-function": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", + "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dev": true, + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/axe-core": { + "version": "4.10.3", + "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.10.3.tgz", + "integrity": 
"sha512-Xm7bpRXnDSX2YE2YFfBk2FnF0ep6tmG7xPh8iHee8MIcrgq762Nkce856dYtJYLkuIoYZvGfTs/PbZhideTcEg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/axios": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.0.tgz", + "integrity": "sha512-oXTDccv8PcfjZmPGlWsPSwtOJCZ/b6W5jAMCNcfwJbCzDckwG0jrYJFaWH1yvivfCXjVzV/SPDEhMB3Q+DSurg==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/axobject-query": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz", + "integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/bail": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", + "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/bcp-47-match": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/bcp-47-match/-/bcp-47-match-2.0.3.tgz", + "integrity": "sha512-JtTezzbAibu8G0R9op9zb3vcWZd9JF6M0xOYGPn0fNCd7wOpRB1mU2mH9T8gaBGbAAyIIVgB2G7xG0GP98zMAQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/bezier-js": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/bezier-js/-/bezier-js-6.1.4.tgz", + "integrity": 
"sha512-PA0FW9ZpcHbojUCMu28z9Vg/fNkwTj5YhusSAjHHDfHDGLxJ6YUKrAN2vk1fP2MMOxVw4Oko16FMlRGVBGqLKg==", + "license": "MIT", + "funding": { + "type": "individual", + "url": "https://github.com/Pomax/bezierjs/blob/master/FUNDING.md" + } + }, + "node_modules/boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==" + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "dev": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + 
"dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "dev": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001727", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001727.tgz", + "integrity": "sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, + "node_modules/canvas-color-tracker": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/canvas-color-tracker/-/canvas-color-tracker-1.3.2.tgz", + "integrity": "sha512-ryQkDX26yJ3CXzb3hxUVNlg1NKE4REc5crLBq661Nxzr8TNd236SaEf2ffYLXyI5tSABSeguHLqcVq4vf9L3Zg==", + "license": "MIT", + "dependencies": { + "tinycolor2": "^1.6.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/ccount": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", + "integrity": 
"sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-html4": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", + "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-reference-invalid": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz", + "integrity": 
"sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/chevrotain": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.0.3.tgz", + "integrity": "sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==", + "license": "Apache-2.0", + "dependencies": { + "@chevrotain/cst-dts-gen": "11.0.3", + "@chevrotain/gast": "11.0.3", + "@chevrotain/regexp-to-ast": "11.0.3", + "@chevrotain/types": "11.0.3", + "@chevrotain/utils": "11.0.3", + "lodash-es": "4.17.21" + } + }, + "node_modules/chevrotain-allstar": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/chevrotain-allstar/-/chevrotain-allstar-0.3.1.tgz", + "integrity": "sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==", + "license": "MIT", + "dependencies": { + "lodash-es": "^4.17.21" + }, + "peerDependencies": { + "chevrotain": "^11.0.0" + } + }, + "node_modules/chevrotain/node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "license": "MIT" + }, + "node_modules/chownr": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", + "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", + "dev": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/class-variance-authority": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/class-variance-authority/-/class-variance-authority-0.7.1.tgz", + "integrity": "sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg==", + "dependencies": { + "clsx": 
"^2.1.1" + }, + "funding": { + "url": "https://polar.sh/cva" + } + }, + "node_modules/client-only": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz", + "integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==" + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/cmdk": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/cmdk/-/cmdk-1.1.1.tgz", + "integrity": "sha512-Vsv7kFaXm+ptHDMZ7izaRsP70GgrW9NBNGswt9OZaVBLlE0SNpDq8eu/VGXyF9r7M0azK3Wy7OlYXsuyYLFzHg==", + "dependencies": { + "@radix-ui/react-compose-refs": "^1.1.1", + "@radix-ui/react-dialog": "^1.1.6", + "@radix-ui/react-id": "^1.1.0", + "@radix-ui/react-primitive": "^2.0.2" + }, + "peerDependencies": { + "react": "^18 || ^19 || ^19.0.0-rc", + "react-dom": "^18 || ^19 || ^19.0.0-rc" + } + }, + "node_modules/color": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", + "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", + "optional": true, + "dependencies": { + "color-convert": "^2.0.1", + "color-string": "^1.9.0" + }, + "engines": { + "node": ">=12.5.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "devOptional": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + 
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "devOptional": true + }, + "node_modules/color-string": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", + "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", + "optional": true, + "dependencies": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "license": "MIT", + "engines": { + "node": ">= 10" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/confbox": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", + "integrity": 
"sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==", + "license": "MIT" + }, + "node_modules/cose-base": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz", + "integrity": "sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==", + "license": "MIT", + "dependencies": { + "layout-base": "^1.0.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/css-selector-parser": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/css-selector-parser/-/css-selector-parser-3.1.3.tgz", + "integrity": "sha512-gJMigczVZqYAk0hPVzx/M4Hm1D9QOtqkdQk9005TNzDIUGzo5cnHEDiKUT7jGPximL/oYb+LIitcHFQ4aKupxg==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/mdevils" + }, + { + "type": "patreon", + "url": "https://patreon.com/mdevils" + } + ] + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/csstype": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", + "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==" + }, + "node_modules/cytoscape": { + "version": "3.33.1", + "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.33.1.tgz", + "integrity": 
"sha512-iJc4TwyANnOGR1OmWhsS9ayRS3s+XQ185FmuHObThD+5AeJCakAAbWv8KimMTt08xCCLNgneQwFp+JRJOr9qGQ==", + "license": "MIT", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/cytoscape-cose-bilkent": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz", + "integrity": "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==", + "license": "MIT", + "dependencies": { + "cose-base": "^1.0.0" + }, + "peerDependencies": { + "cytoscape": "^3.2.0" + } + }, + "node_modules/cytoscape-fcose": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cytoscape-fcose/-/cytoscape-fcose-2.2.0.tgz", + "integrity": "sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==", + "license": "MIT", + "dependencies": { + "cose-base": "^2.2.0" + }, + "peerDependencies": { + "cytoscape": "^3.2.0" + } + }, + "node_modules/cytoscape-fcose/node_modules/cose-base": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-2.2.0.tgz", + "integrity": "sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==", + "license": "MIT", + "dependencies": { + "layout-base": "^2.0.0" + } + }, + "node_modules/cytoscape-fcose/node_modules/layout-base": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-2.0.1.tgz", + "integrity": "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==", + "license": "MIT" + }, + "node_modules/d3": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz", + "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==", + "license": "ISC", + "dependencies": { + "d3-array": "3", + "d3-axis": "3", + "d3-brush": "3", + "d3-chord": "3", + "d3-color": "3", + "d3-contour": "4", + "d3-delaunay": "6", 
+ "d3-dispatch": "3", + "d3-drag": "3", + "d3-dsv": "3", + "d3-ease": "3", + "d3-fetch": "3", + "d3-force": "3", + "d3-format": "3", + "d3-geo": "3", + "d3-hierarchy": "3", + "d3-interpolate": "3", + "d3-path": "3", + "d3-polygon": "3", + "d3-quadtree": "3", + "d3-random": "3", + "d3-scale": "4", + "d3-scale-chromatic": "3", + "d3-selection": "3", + "d3-shape": "3", + "d3-time": "3", + "d3-time-format": "4", + "d3-timer": "3", + "d3-transition": "3", + "d3-zoom": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "license": "ISC", + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-axis": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", + "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-binarytree": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/d3-binarytree/-/d3-binarytree-1.0.2.tgz", + "integrity": "sha512-cElUNH+sHu95L04m92pG73t2MEJXKu+GeKUN1TJkFsu93E5W8E9Sc3kHEGJKgenGvj19m6upSn2EunvMgMD2Yw==", + "license": "MIT" + }, + "node_modules/d3-brush": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", + "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "3", + "d3-transition": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-chord": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", + "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", + "license": "ISC", + "dependencies": { + "d3-path": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-contour": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", + "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", + "license": "ISC", + "dependencies": { + "d3-array": "^3.2.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-delaunay": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz", + "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==", + "license": "ISC", + "dependencies": { + "delaunator": "5" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dispatch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", + "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-drag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", + "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-selection": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dsv": { + 
"version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", + "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", + "license": "ISC", + "dependencies": { + "commander": "7", + "iconv-lite": "0.6", + "rw": "1" + }, + "bin": { + "csv2json": "bin/dsv2json.js", + "csv2tsv": "bin/dsv2dsv.js", + "dsv2dsv": "bin/dsv2dsv.js", + "dsv2json": "bin/dsv2json.js", + "json2csv": "bin/json2dsv.js", + "json2dsv": "bin/json2dsv.js", + "json2tsv": "bin/json2dsv.js", + "tsv2csv": "bin/dsv2dsv.js", + "tsv2json": "bin/dsv2json.js" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-fetch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", + "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", + "license": "ISC", + "dependencies": { + "d3-dsv": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-force": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", + "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-quadtree": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-force-3d": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/d3-force-3d/-/d3-force-3d-3.0.6.tgz", + "integrity": "sha512-4tsKHUPLOVkyfEffZo1v6sFHvGFwAIIjt/W8IThbp08DYAsXZck+2pSHEG5W1+gQgEvFLdZkYvmJAbRM2EzMnA==", + "license": "MIT", + "dependencies": { + 
"d3-binarytree": "1", + "d3-dispatch": "1 - 3", + "d3-octree": "1", + "d3-quadtree": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-format": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", + "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-geo": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz", + "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2.5.0 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-hierarchy": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", + "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-octree": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/d3-octree/-/d3-octree-1.1.0.tgz", + "integrity": "sha512-F8gPlqpP+HwRPMO/8uOu5wjH110+6q4cgJvgJT6vlpy3BEaDIKlTZrgHKZSp/i1InRpVfh4puY/kvL6MxK930A==", + "license": "MIT" + }, + "node_modules/d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", + 
"license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-polygon": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", + "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-quadtree": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", + "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-random": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", + "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-sankey": { + "version": "0.12.3", + "resolved": "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz", + "integrity": "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==", + "license": "BSD-3-Clause", + "dependencies": { + "d3-array": "1 - 2", + "d3-shape": "^1.2.0" + } + }, + "node_modules/d3-sankey/node_modules/d3-array": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", + "integrity": "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==", + "license": "BSD-3-Clause", + "dependencies": { + "internmap": "^1.0.0" + } + }, + "node_modules/d3-sankey/node_modules/d3-path": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz", + "integrity": "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==", + "license": "BSD-3-Clause" + }, + 
"node_modules/d3-sankey/node_modules/d3-shape": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz", + "integrity": "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==", + "license": "BSD-3-Clause", + "dependencies": { + "d3-path": "1" + } + }, + "node_modules/d3-sankey/node_modules/internmap": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz", + "integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==", + "license": "ISC" + }, + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "license": "ISC", + "dependencies": { + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale-chromatic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", + "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3", + "d3-interpolate": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-selection": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": 
"sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", + "license": "ISC", + "dependencies": { + "d3-path": "^3.1.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "license": "ISC", + "dependencies": { + "d3-time": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-transition": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", + "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3", + "d3-dispatch": "1 - 3", + "d3-ease": "1 - 3", + "d3-interpolate": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "d3-selection": "2 - 3" + } + }, + "node_modules/d3-zoom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", + "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", + "license": "ISC", + 
"dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "2 - 3", + "d3-transition": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/dagre-d3-es": { + "version": "7.0.13", + "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.13.tgz", + "integrity": "sha512-efEhnxpSuwpYOKRm/L5KbqoZmNNukHa/Flty4Wp62JRvgH2ojwVgPgdYyr4twpieZnyRDdIH7PY2mopX26+j2Q==", + "license": "MIT", + "dependencies": { + "d3": "^7.9.0", + "lodash-es": "^4.17.21" + } + }, + "node_modules/damerau-levenshtein": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", + "integrity": "sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==", + "dev": true + }, + "node_modules/data-view-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", + "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", + "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/inspect-js" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", + 
"integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/date-fns": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-4.1.0.tgz", + "integrity": "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/kossnocorp" + } + }, + "node_modules/dayjs": { + "version": "1.11.19", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.19.tgz", + "integrity": "sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw==", + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decode-named-character-reference": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.2.0.tgz", + "integrity": "sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q==", + "dependencies": { + "character-entities": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": 
"sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dev": true, + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/delaunator": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", + "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==", + "license": "ISC", + "dependencies": { + "robust-predicates": "^3.0.2" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + 
"engines": { + "node": ">=6" + } + }, + "node_modules/detect-libc": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz", + "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==", + "devOptional": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/detect-node-es": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", + "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==" + }, + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "dependencies": { + "dequal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/direction": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/direction/-/direction-2.0.1.tgz", + "integrity": "sha512-9S6m9Sukh1cZNknO1CWAr2QAWsbKLafQiyM5gZ7VgXHeuaoUwffKN4q6NC4A/Mf9iiPlOXQEKW/Mv/mh9/3YFA==", + "bin": { + "direction": "cli.js" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/dompurify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz", + "integrity": "sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==", + "license": "(MPL-2.0 OR Apache-2.0)", + 
"optionalDependencies": { + "@types/trusted-types": "^2.0.7" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "node_modules/enhanced-resolve": { + "version": "5.18.2", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.2.tgz", + "integrity": "sha512-6Jw4sE1maoRJo3q8MsSIn2onJFbLTOjY9hlx4DZXmOKvLRd1Ok2kXmAGXaafL2+ijsJZ1ClYbl/pmqr9+k4iUQ==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/entities": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", + "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/es-abstract": { + "version": "1.24.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", + "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==", + "dev": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.2", + "arraybuffer.prototype.slice": "^1.0.4", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "data-view-buffer": "^1.0.2", + 
"data-view-byte-length": "^1.0.2", + "data-view-byte-offset": "^1.0.1", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "es-set-tostringtag": "^2.1.0", + "es-to-primitive": "^1.3.0", + "function.prototype.name": "^1.1.8", + "get-intrinsic": "^1.3.0", + "get-proto": "^1.0.1", + "get-symbol-description": "^1.1.0", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "internal-slot": "^1.1.0", + "is-array-buffer": "^3.0.5", + "is-callable": "^1.2.7", + "is-data-view": "^1.0.2", + "is-negative-zero": "^2.0.3", + "is-regex": "^1.2.1", + "is-set": "^2.0.3", + "is-shared-array-buffer": "^1.0.4", + "is-string": "^1.1.1", + "is-typed-array": "^1.1.15", + "is-weakref": "^1.1.1", + "math-intrinsics": "^1.1.0", + "object-inspect": "^1.13.4", + "object-keys": "^1.1.1", + "object.assign": "^4.1.7", + "own-keys": "^1.0.1", + "regexp.prototype.flags": "^1.5.4", + "safe-array-concat": "^1.1.3", + "safe-push-apply": "^1.0.0", + "safe-regex-test": "^1.1.0", + "set-proto": "^1.0.0", + "stop-iteration-iterator": "^1.1.0", + "string.prototype.trim": "^1.2.10", + "string.prototype.trimend": "^1.0.9", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.3", + "typed-array-byte-length": "^1.0.3", + "typed-array-byte-offset": "^1.0.4", + "typed-array-length": "^1.0.7", + "unbox-primitive": "^1.1.0", + "which-typed-array": "^1.1.19" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": 
"https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-iterator-helpers": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.2.1.tgz", + "integrity": "sha512-uDn+FE1yrDzyC0pCo961B2IHbdM8y/ACZsKD4dG6WqrjV53BADjwa7D+1aom2rsNVfLyDgU/eigvlJGJ08OQ4w==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.6", + "es-errors": "^1.3.0", + "es-set-tostringtag": "^2.0.3", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.6", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", + "internal-slot": "^1.1.0", + "iterator.prototype": "^1.1.4", + "safe-array-concat": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-shim-unscopables": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.1.0.tgz", + 
"integrity": "sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==", + "dev": true, + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-to-primitive": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", + "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", + "dev": true, + "dependencies": { + "is-callable": "^1.2.7", + "is-date-object": "^1.0.5", + "is-symbol": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "9.31.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.31.0.tgz", + "integrity": "sha512-QldCVh/ztyKJJZLr4jXNUByx3gR+TDYZCRXEktiZoUR3PGy4qCmSbkxcIle8GEwGpb5JBZazlaJ/CxLidXdEbQ==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.21.0", + "@eslint/config-helpers": "^0.3.0", + "@eslint/core": "^0.15.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.31.0", + "@eslint/plugin-kit": "^0.3.1", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "@types/json-schema": "^7.0.15", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.4.0", + 
"eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "node_modules/eslint-config-next": { + "version": "15.4.2", + "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-15.4.2.tgz", + "integrity": "sha512-rAeZyTWn1/36Y+S+KpJ/W+RAUmM6fpBWsON4Uci+5l9DIKrhkMK0rgAZQ45ktx+xFk5tyYwkTBGit/9jalsHrw==", + "dev": true, + "dependencies": { + "@next/eslint-plugin-next": "15.4.2", + "@rushstack/eslint-patch": "^1.10.3", + "@typescript-eslint/eslint-plugin": "^5.4.2 || ^6.0.0 || ^7.0.0 || ^8.0.0", + "@typescript-eslint/parser": "^5.4.2 || ^6.0.0 || ^7.0.0 || ^8.0.0", + "eslint-import-resolver-node": "^0.3.6", + "eslint-import-resolver-typescript": "^3.5.2", + "eslint-plugin-import": "^2.31.0", + "eslint-plugin-jsx-a11y": "^6.10.0", + "eslint-plugin-react": "^7.37.0", + "eslint-plugin-react-hooks": "^5.0.0" + }, + "peerDependencies": { + "eslint": "^7.23.0 || ^8.0.0 || ^9.0.0", + "typescript": ">=3.3.1" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/eslint-import-resolver-node": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", + "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", + "dev": 
true, + "dependencies": { + "debug": "^3.2.7", + "is-core-module": "^2.13.0", + "resolve": "^1.22.4" + } + }, + "node_modules/eslint-import-resolver-node/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-import-resolver-typescript": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.10.1.tgz", + "integrity": "sha512-A1rHYb06zjMGAxdLSkN2fXPBwuSaQ0iO5M/hdyS0Ajj1VBaRp0sPD3dn1FhME3c/JluGFbwSxyCfqdSbtQLAHQ==", + "dev": true, + "dependencies": { + "@nolyfill/is-core-module": "1.0.39", + "debug": "^4.4.0", + "get-tsconfig": "^4.10.0", + "is-bun-module": "^2.0.0", + "stable-hash": "^0.0.5", + "tinyglobby": "^0.2.13", + "unrs-resolver": "^1.6.2" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint-import-resolver-typescript" + }, + "peerDependencies": { + "eslint": "*", + "eslint-plugin-import": "*", + "eslint-plugin-import-x": "*" + }, + "peerDependenciesMeta": { + "eslint-plugin-import": { + "optional": true + }, + "eslint-plugin-import-x": { + "optional": true + } + } + }, + "node_modules/eslint-module-utils": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.1.tgz", + "integrity": "sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==", + "dev": true, + "dependencies": { + "debug": "^3.2.7" + }, + "engines": { + "node": ">=4" + }, + "peerDependenciesMeta": { + "eslint": { + "optional": true + } + } + }, + "node_modules/eslint-module-utils/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + 
"integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-import": { + "version": "2.32.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.32.0.tgz", + "integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==", + "dev": true, + "dependencies": { + "@rtsao/scc": "^1.1.0", + "array-includes": "^3.1.9", + "array.prototype.findlastindex": "^1.2.6", + "array.prototype.flat": "^1.3.3", + "array.prototype.flatmap": "^1.3.3", + "debug": "^3.2.7", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.9", + "eslint-module-utils": "^2.12.1", + "hasown": "^2.0.2", + "is-core-module": "^2.16.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.fromentries": "^2.0.8", + "object.groupby": "^1.0.3", + "object.values": "^1.2.1", + "semver": "^6.3.1", + "string.prototype.trimend": "^1.0.9", + "tsconfig-paths": "^3.15.0" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9" + } + }, + "node_modules/eslint-plugin-import/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-import/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/eslint-plugin-jsx-a11y": { + "version": "6.10.2", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.10.2.tgz", + "integrity": "sha512-scB3nz4WmG75pV8+3eRUQOHZlNSUhFNq37xnpgRkCCELU3XMvXAxLk1eqWWyE22Ki4Q01Fnsw9BA3cJHDPgn2Q==", + "dev": true, + "dependencies": { + "aria-query": "^5.3.2", + "array-includes": "^3.1.8", + "array.prototype.flatmap": "^1.3.2", + "ast-types-flow": "^0.0.8", + "axe-core": "^4.10.0", + "axobject-query": "^4.1.0", + "damerau-levenshtein": "^1.0.8", + "emoji-regex": "^9.2.2", + "hasown": "^2.0.2", + "jsx-ast-utils": "^3.3.5", + "language-tags": "^1.0.9", + "minimatch": "^3.1.2", + "object.fromentries": "^2.0.8", + "safe-regex-test": "^1.0.3", + "string.prototype.includes": "^2.0.1" + }, + "engines": { + "node": ">=4.0" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9" + } + }, + "node_modules/eslint-plugin-react": { + "version": "7.37.5", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.37.5.tgz", + "integrity": "sha512-Qteup0SqU15kdocexFNAJMvCJEfa2xUKNV4CC1xsVMrIIqEy3SQ/rqyxCWNzfrd3/ldy6HMlD2e0JDVpDg2qIA==", + "dev": true, + "dependencies": { + "array-includes": "^3.1.8", + "array.prototype.findlast": "^1.2.5", + "array.prototype.flatmap": "^1.3.3", + "array.prototype.tosorted": "^1.1.4", + "doctrine": "^2.1.0", + "es-iterator-helpers": "^1.2.1", + "estraverse": "^5.3.0", + "hasown": "^2.0.2", + "jsx-ast-utils": "^2.4.1 || ^3.0.0", + "minimatch": "^3.1.2", + "object.entries": "^1.1.9", + "object.fromentries": "^2.0.8", + "object.values": "^1.2.1", + "prop-types": "^15.8.1", + "resolve": "^2.0.0-next.5", + "semver": "^6.3.1", + "string.prototype.matchall": "^4.0.12", + "string.prototype.repeat": "^1.0.0" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7" + } + }, + "node_modules/eslint-plugin-react-hooks": { + "version": "5.2.0", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-5.2.0.tgz", + "integrity": "sha512-+f15FfK64YQwZdJNELETdn5ibXEUQmW1DZL6KXhNnc2heoy/sg9VJJeT7n8TlMWouzWqSWavFkIhHyIbIAEapg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0" + } + }, + "node_modules/eslint-plugin-react/node_modules/resolve": { + "version": "2.0.0-next.5", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz", + "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==", + "dev": true, + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/eslint-plugin-react/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/eslint-scope": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": 
true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/espree": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "dependencies": { + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estree-util-is-identifier-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz", + "integrity": "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==", + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.1.tgz", + "integrity": "sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": 
true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": 
"sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true + }, + "node_modules/float-tooltip": { + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/float-tooltip/-/float-tooltip-1.7.5.tgz", + "integrity": "sha512-/kXzuDnnBqyyWyhDMH7+PfP8J/oXiAavGzcRxASOMRHFuReDtofizLLJsf7nnDLAfEaMW4pVWaXrAjtnglpEkg==", + "license": "MIT", + "dependencies": { + "d3-selection": "2 - 3", + "kapsule": "^1.16", + "preact": "10" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.9", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", + "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/for-each": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + "dev": true, + "dependencies": { + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/force-graph": { + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/force-graph/-/force-graph-1.51.0.tgz", + "integrity": 
"sha512-aTnihCmiMA0ItLJLCbrQYS9mzriopW24goFPgUnKAAmAlPogTSmFWqoBPMXzIfPb7bs04Hur5zEI4WYgLW3Sig==", + "license": "MIT", + "dependencies": { + "@tweenjs/tween.js": "18 - 25", + "accessor-fn": "1", + "bezier-js": "3 - 6", + "canvas-color-tracker": "^1.3", + "d3-array": "1 - 3", + "d3-drag": "2 - 3", + "d3-force-3d": "2 - 3", + "d3-scale": "1 - 4", + "d3-scale-chromatic": "1 - 3", + "d3-selection": "2 - 3", + "d3-zoom": "2 - 3", + "float-tooltip": "^1.7", + "index-array-by": "1", + "kapsule": "^1.16", + "lodash-es": "4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/form-data": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function.prototype.name": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", + "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "functions-have-names": "^1.2.3", + "hasown": "^2.0.2", + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functions-have-names": { + "version": 
"1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-nonce": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", + "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==", + "engines": { + "node": ">=6" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-symbol-description": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", + "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + 
"get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-tsconfig": { + "version": "4.10.1", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.10.1.tgz", + "integrity": "sha512-auHyJ4AgMz7vgS8Hp3N6HXSmlMdUyhSUrfBF16w153rxtLIEOE+HGqaBppczZvnHLqQJfiHotCYpNhl0lUROFQ==", + "dev": true, + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/github-slugger": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-2.0.0.tgz", + "integrity": "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==" + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globalthis": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", + "dev": true, + "dependencies": { + "define-properties": "^1.2.1", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true + }, + "node_modules/hachure-fill": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/hachure-fill/-/hachure-fill-0.5.2.tgz", + "integrity": "sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==", + "license": "MIT" + }, + "node_modules/has-bigints": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz", + "integrity": "sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": 
"sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", + "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", + "dev": true, + "dependencies": { + "dunder-proto": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hast-util-from-html": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/hast-util-from-html/-/hast-util-from-html-2.0.3.tgz", + "integrity": 
"sha512-CUSRHXyKjzHov8yKsQjGOElXy/3EKpyX56ELnkHH34vDVw1N1XSQ1ZcAvTyAPtGqLTuKP/uxM+aLkSPqF/EtMw==", + "dependencies": { + "@types/hast": "^3.0.0", + "devlop": "^1.1.0", + "hast-util-from-parse5": "^8.0.0", + "parse5": "^7.0.0", + "vfile": "^6.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-8.0.3.tgz", + "integrity": "sha512-3kxEVkEKt0zvcZ3hCRYI8rqrgwtlIOFMWkbclACvjlDw8Li9S2hk/d51OI0nr/gIpdMHNepwgOKqZ/sy0Clpyg==", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "devlop": "^1.0.0", + "hastscript": "^9.0.0", + "property-information": "^7.0.0", + "vfile": "^6.0.0", + "vfile-location": "^5.0.0", + "web-namespaces": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-has-property": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-has-property/-/hast-util-has-property-3.0.0.tgz", + "integrity": "sha512-MNilsvEKLFpV604hwfhVStK0usFY/QmM5zX16bo7EjnAEGofr5YyI37kzopBlZJkHD4t887i+q/C8/tr5Q94cA==", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-heading-rank": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-heading-rank/-/hast-util-heading-rank-3.0.0.tgz", + "integrity": "sha512-EJKb8oMUXVHcWZTDepnr+WNbfnXKFNf9duMesmr4S8SXTJBJ9M4Yok08pu9vxdJwdlGRhVumk9mEhkEvKGifwA==", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-is-element": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-3.0.0.tgz", + "integrity": "sha512-Val9mnv2IWpLbNPqc/pUem+a7Ipj2aHacCwgNfTiK0vJKl0LF+4Ba4+v1oPHFpf3bLYmreq0/l3Gud9S5OH42g==", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-parse-selector": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz", + "integrity": "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-raw": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-9.1.0.tgz", + "integrity": "sha512-Y8/SBAHkZGoNkpzqqfCldijcuUKh7/su31kEBp67cFY09Wy0mTRgtsLYsiIxMJxlu0f6AA5SUTbDR8K0rxnbUw==", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "@ungap/structured-clone": "^1.0.0", + "hast-util-from-parse5": "^8.0.0", + "hast-util-to-parse5": "^8.0.0", + "html-void-elements": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "parse5": "^7.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0", + "web-namespaces": "^2.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-select": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/hast-util-select/-/hast-util-select-6.0.4.tgz", + "integrity": "sha512-RqGS1ZgI0MwxLaKLDxjprynNzINEkRHY2i8ln4DDjgv9ZhcYVIHN9rlpiYsqtFwrgpYU361SyWDQcGNIBVu3lw==", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "bcp-47-match": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "css-selector-parser": "^3.0.0", + 
"devlop": "^1.0.0", + "direction": "^2.0.0", + "hast-util-has-property": "^3.0.0", + "hast-util-to-string": "^3.0.0", + "hast-util-whitespace": "^3.0.0", + "nth-check": "^2.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0", + "unist-util-visit": "^5.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-html": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.5.tgz", + "integrity": "sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-whitespace": "^3.0.0", + "html-void-elements": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0", + "stringify-entities": "^4.0.0", + "zwitch": "^2.0.4" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-jsx-runtime": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.6.tgz", + "integrity": "sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg==", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "devlop": "^1.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "hast-util-whitespace": "^3.0.0", + "mdast-util-mdx-expression": "^2.0.0", + "mdast-util-mdx-jsx": "^3.0.0", + "mdast-util-mdxjs-esm": "^2.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0", + "style-to-js": "^1.0.0", + "unist-util-position": "^5.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": 
"opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-parse5": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-8.0.0.tgz", + "integrity": "sha512-3KKrV5ZVI8if87DVSi1vDeByYrkGzg4mEfeu4alwgmmIeARiBLKCZS2uw5Gb6nU9x9Yufyj3iudm6i7nl52PFw==", + "dependencies": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "devlop": "^1.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", + "web-namespaces": "^2.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-parse5/node_modules/property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-string": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/hast-util-to-string/-/hast-util-to-string-3.0.1.tgz", + "integrity": "sha512-XelQVTDWvqcl3axRfI0xSeoVKzyIFPwsAGSLIsKdJKQMXDYJS4WYrBNF/8J7RdhIcFI2BOHgAifggsvsxp/3+A==", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-whitespace": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", + "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hastscript": { + "version": "9.0.1", + "resolved": 
"https://registry.npmjs.org/hastscript/-/hastscript-9.0.1.tgz", + "integrity": "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==", + "dependencies": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^4.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/html-url-attributes": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/html-url-attributes/-/html-url-attributes-3.0.1.tgz", + "integrity": "sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/html-void-elements": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz", + "integrity": "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": 
"https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/index-array-by": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/index-array-by/-/index-array-by-1.4.2.tgz", + "integrity": "sha512-SP23P27OUKzXWEC/TOyWlwLviofQkCSCKONnc62eItjp69yCZZPqDQtr3Pw5gJDnPeUMqExmKydNZaJO0FU9pw==", + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/inline-style-parser": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.4.tgz", + "integrity": "sha512-0aO8FkhNZlj/ZIbNi7Lxxr12obT7cL1moPfE4tg1LkX7LlLfC6DeX4l2ZEud1ukP9jNQyNnfzQVqwbwmAATY4Q==" + }, + "node_modules/internal-slot": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", + "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "hasown": "^2.0.2", + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "license": "ISC", + "engines": { + "node": ">=12" + 
} + }, + "node_modules/is-alphabetical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz", + "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-alphanumerical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz", + "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==", + "dependencies": { + "is-alphabetical": "^2.0.0", + "is-decimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-array-buffer": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", + "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-arrayish": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", + "optional": true + }, + "node_modules/is-async-function": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", + "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", + "dev": true, + "dependencies": { + "async-function": "^1.0.0", + "call-bound": "^1.0.3", + "get-proto": "^1.0.1", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + 
}, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-bigint": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", + "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", + "dev": true, + "dependencies": { + "has-bigints": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-boolean-object": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", + "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-bun-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-bun-module/-/is-bun-module-2.0.0.tgz", + "integrity": "sha512-gNCGbnnnnFAUGKeZ9PdbyeGYJqewpmc2aKHUEMO5nQPWU9lOmv7jcmQIv+qHD8fXW6W7qfuCwX4rY9LNRjXrkQ==", + "dev": true, + "dependencies": { + "semver": "^7.7.1" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "dependencies": { + "hasown": 
"^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-view": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", + "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-date-object": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", + "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-decimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz", + "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-finalizationregistry": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", + "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", + 
"dev": true, + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-generator-function": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz", + "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.3", + "get-proto": "^1.0.0", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-hexadecimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz", + "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-map": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", + "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-negative-zero": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": 
"sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-number-object": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", + "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-regex": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-set": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", + "integrity": 
"sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", + "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-string": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", + "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", + "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.2", + "has-symbols": "^1.1.0", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "dev": true, + "dependencies": { + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakmap": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", + "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakref": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", + "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakset": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", + "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/iterator.prototype": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.5.tgz", + "integrity": 
"sha512-H0dkQoCa3b2VEeKQBOxFph+JAbcrQdE7KC0UkqwpLmv2EC4P41QXP+rqo9wYodACiG5/WM5s9oDApTU8utwj9g==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.6", + "get-proto": "^1.0.0", + "has-symbols": "^1.1.0", + "set-function-name": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/jerrypick": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/jerrypick/-/jerrypick-1.1.2.tgz", + "integrity": "sha512-YKnxXEekXKzhpf7CLYA0A+oDP8V0OhICNCr5lv96FvSsDEmrb0GKM776JgQvHTMjr7DTTPEVv/1Ciaw0uEWzBA==", + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/jiti": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz", + "integrity": "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==", + "dev": true, + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, + "node_modules/js-yaml": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", 
+ "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true + }, + "node_modules/json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dev": true, + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/jsx-ast-utils": { + "version": "3.3.5", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", + "integrity": "sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==", + "dev": true, + "dependencies": { + "array-includes": "^3.1.6", + "array.prototype.flat": "^1.3.1", + "object.assign": "^4.1.4", + "object.values": "^1.1.6" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/kapsule": { + "version": "1.16.3", + "resolved": "https://registry.npmjs.org/kapsule/-/kapsule-1.16.3.tgz", + "integrity": "sha512-4+5mNNf4vZDSwPhKprKwz3330iisPrb08JyMgbsdFrimBCKNHecua/WBwvVg3n7vwx0C1ARjfhwIpbrbd9n5wg==", + "license": "MIT", + "dependencies": { + "lodash-es": "4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/katex": { + "version": "0.16.27", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.27.tgz", + "integrity": "sha512-aeQoDkuRWSqQN6nSvVCEFvfXdqo1OQiCmmW1kc9xSdjutPv7BGO7pqY9sQRJpMOGrEdfDgF2TfRXe5eUAD2Waw==", + "funding": [ + "https://opencollective.com/katex", + "https://github.com/sponsors/katex" + ], + "license": "MIT", + "dependencies": { + 
"commander": "^8.3.0" + }, + "bin": { + "katex": "cli.js" + } + }, + "node_modules/katex/node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/khroma": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz", + "integrity": "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==" + }, + "node_modules/langium": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/langium/-/langium-3.3.1.tgz", + "integrity": "sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==", + "license": "MIT", + "dependencies": { + "chevrotain": "~11.0.3", + "chevrotain-allstar": "~0.3.0", + "vscode-languageserver": "~9.0.1", + "vscode-languageserver-textdocument": "~1.0.11", + "vscode-uri": "~3.0.8" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/language-subtag-registry": { + "version": "0.3.23", + "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.23.tgz", + "integrity": "sha512-0K65Lea881pHotoGEa5gDlMxt3pctLi2RplBb7Ezh4rRdLEOtgi7n4EwK9lamnUCkKBqaeKRVebTq6BAxSkpXQ==", + "dev": true + }, + "node_modules/language-tags": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.9.tgz", + "integrity": "sha512-MbjN408fEndfiQXbFQ1vnd+1NoLDsnQW41410oQBXiyXDMYH5z505juWa4KUE1LqxRC7DgOgZDbKLxHIwm27hA==", + "dev": true, 
+ "dependencies": { + "language-subtag-registry": "^0.3.20" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/layout-base": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz", + "integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==", + "license": "MIT" + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lightningcss": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.1.tgz", + "integrity": "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==", + "dev": true, + "dependencies": { + "detect-libc": "^2.0.3" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "lightningcss-darwin-arm64": "1.30.1", + "lightningcss-darwin-x64": "1.30.1", + "lightningcss-freebsd-x64": "1.30.1", + "lightningcss-linux-arm-gnueabihf": "1.30.1", + "lightningcss-linux-arm64-gnu": "1.30.1", + "lightningcss-linux-arm64-musl": "1.30.1", + "lightningcss-linux-x64-gnu": "1.30.1", + "lightningcss-linux-x64-musl": "1.30.1", + "lightningcss-win32-arm64-msvc": "1.30.1", + "lightningcss-win32-x64-msvc": "1.30.1" + } + }, + "node_modules/lightningcss-darwin-arm64": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.1.tgz", + "integrity": "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ==", + "cpu": [ + "arm64" + ], + "dev": true, + 
"optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-x64": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.1.tgz", + "integrity": "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-freebsd-x64": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.1.tgz", + "integrity": "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm-gnueabihf": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.1.tgz", + "integrity": "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-gnu": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.1.tgz", + "integrity": 
"sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-musl": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.1.tgz", + "integrity": "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-gnu": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.1.tgz", + "integrity": "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-musl": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.1.tgz", + "integrity": "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-arm64-msvc": { + "version": "1.30.1", + "resolved": 
"https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.1.tgz", + "integrity": "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-x64-msvc": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.1.tgz", + "integrity": "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash-es": { + "version": "4.17.22", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.22.tgz", + "integrity": "sha512-XEawp1t0gxSi9x01glktRZ5HDy0HXqrM0x5pXQM98EaI0NxO6jVM7omDOxsuEo5UIASAnm2bRp1Jt/e0a2XU8Q==", + "license": "MIT" + }, + "node_modules/lodash.castarray": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.castarray/-/lodash.castarray-4.4.0.tgz", + "integrity": "sha512-aVx8ztPv7/2ULbArGJ2Y42bG1mEQ5mGjpdvrbJcJFU3TbYybe+QlLS4pst9zV52ymy2in1KpFPiZnAOATxD4+Q==" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + 
"resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" + }, + "node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lucide-react": { + "version": "0.525.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.525.0.tgz", + "integrity": "sha512-Tm1txJ2OkymCGkvwoHt33Y2JpN5xucVq1slHcgE6Lk0WjDfjgKWor5CdVER8U6DvcfMwh4M8XxmpTiyzfmfDYQ==", + "peerDependencies": { + "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/magic-string": { + "version": "0.30.17", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", + "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", + "dev": true, + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0" + } + }, + "node_modules/markdown-table": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz", 
+ "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/marked": { + "version": "16.4.2", + "resolved": "https://registry.npmjs.org/marked/-/marked-16.4.2.tgz", + "integrity": "sha512-TI3V8YYWvkVf3KJe1dRkpnjs68JUPyEa5vjKrp1XEEJUAOaQc+Qj+L1qWbPd0SJuAdQkFU0h73sXXqwDYxsiDA==", + "license": "MIT", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mdast-util-find-and-replace": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz", + "integrity": "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==", + "dependencies": { + "@types/mdast": "^4.0.0", + "escape-string-regexp": "^5.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mdast-util-from-markdown": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz", + "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark": "^4.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz", + "integrity": "sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==", + "dependencies": { + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-gfm-autolink-literal": "^2.0.0", + "mdast-util-gfm-footnote": "^2.0.0", + "mdast-util-gfm-strikethrough": "^2.0.0", + "mdast-util-gfm-table": "^2.0.0", + "mdast-util-gfm-task-list-item": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-autolink-literal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz", + "integrity": "sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==", + "dependencies": { + "@types/mdast": "^4.0.0", + "ccount": "^2.0.0", + "devlop": "^1.0.0", + "mdast-util-find-and-replace": "^3.0.0", + "micromark-util-character": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-footnote": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz", + "integrity": "sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-strikethrough": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz", + "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz", + "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "markdown-table": "^3.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-task-list-item": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz", + "integrity": 
"sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.1.tgz", + "integrity": "sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.2.0.tgz", + "integrity": "sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "parse-entities": "^4.0.0", + "stringify-entities": "^4.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz", + "integrity": "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==", + 
"dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-phrasing": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", + "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", + "dependencies": { + "@types/mdast": "^4.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast": { + "version": "13.2.1", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz", + "integrity": "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@ungap/structured-clone": "^1.0.0", + "devlop": "^1.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "trim-lines": "^3.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-markdown": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz", + "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^4.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + 
"unist-util-visit": "^5.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz", + "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==", + "dependencies": { + "@types/mdast": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/mermaid": { + "version": "11.12.2", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.12.2.tgz", + "integrity": "sha512-n34QPDPEKmaeCG4WDMGy0OT6PSyxKCfy2pJgShP+Qow2KLrvWjclwbc3yXfSIf4BanqWEhQEpngWwNp/XhZt6w==", + "license": "MIT", + "dependencies": { + "@braintree/sanitize-url": "^7.1.1", + "@iconify/utils": "^3.0.1", + "@mermaid-js/parser": "^0.6.3", + "@types/d3": "^7.4.3", + "cytoscape": "^3.29.3", + "cytoscape-cose-bilkent": "^4.1.0", + "cytoscape-fcose": "^2.2.0", + "d3": "^7.9.0", + "d3-sankey": "^0.12.3", + "dagre-d3-es": "7.0.13", + "dayjs": "^1.11.18", + "dompurify": "^3.2.5", + "katex": "^0.16.22", + "khroma": "^2.1.0", + "lodash-es": "^4.17.21", + "marked": "^16.2.1", + "roughjs": "^4.6.6", + "stylis": "^4.3.6", + "ts-dedent": "^2.2.0", + "uuid": "^11.1.0" + } + }, + "node_modules/micromark": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz", + "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": 
"https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz", + "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-factory-destination": "^2.0.0", + "micromark-factory-label": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-factory-title": "^2.0.0", + "micromark-factory-whitespace": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-html-tag-name": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + 
}, + "node_modules/micromark-extension-gfm": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz", + "integrity": "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==", + "dependencies": { + "micromark-extension-gfm-autolink-literal": "^2.0.0", + "micromark-extension-gfm-footnote": "^2.0.0", + "micromark-extension-gfm-strikethrough": "^2.0.0", + "micromark-extension-gfm-table": "^2.0.0", + "micromark-extension-gfm-tagfilter": "^2.0.0", + "micromark-extension-gfm-task-list-item": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-autolink-literal": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz", + "integrity": "sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-footnote": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz", + "integrity": "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==", + "dependencies": { + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + 
"micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-strikethrough": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz", + "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-table": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz", + "integrity": "sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-tagfilter": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz", + "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==", + "dependencies": { + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-task-list-item": { + "version": "2.1.0", + 
"resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz", + "integrity": "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-factory-destination": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz", + "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-label": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz", + "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-space": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", + "integrity": 
"sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-title": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz", + "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-whitespace": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz", + "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-character": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", + "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", 
+ "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-chunked": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz", + "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-classify-character": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz", + "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-combine-extensions": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz", + "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + 
"micromark-util-chunked": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz", + "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-string": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz", + "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-encode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz", + "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-html-tag-name": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz", + "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-normalize-identifier": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz", + "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-resolve-all": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz", + "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-sanitize-uri": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz", + "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": 
"https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-subtokenize": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz", + "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-symbol": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", + "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-types": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz", + "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": 
"sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/minizlib": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz", + "integrity": 
"sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==", + "dev": true, + "dependencies": { + "minipass": "^7.1.2" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/mkdirp": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", + "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", + "dev": true, + "bin": { + "mkdirp": "dist/cjs/src/bin.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/mlly": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.8.0.tgz", + "integrity": "sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==", + "license": "MIT", + "dependencies": { + "acorn": "^8.15.0", + "pathe": "^2.0.3", + "pkg-types": "^1.3.1", + "ufo": "^1.6.1" + } + }, + "node_modules/monaco-editor": { + "version": "0.52.2", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.52.2.tgz", + "integrity": "sha512-GEQWEZmfkOGLdd3XK8ryrfWz3AIP8YymVXiPHEdewrUq7mh0qrKrfHLNCXcbB6sTnMLnOZ3ztSiKcciFUkIJwQ==", + "peer": true + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/napi-postinstall": { + "version": "0.3.2", + "resolved": 
"https://registry.npmjs.org/napi-postinstall/-/napi-postinstall-0.3.2.tgz", + "integrity": "sha512-tWVJxJHmBWLy69PvO96TZMZDrzmw5KeiZBz3RHmiM2XZ9grBJ2WgMAFVVg25nqp3ZjTFUs2Ftw1JhscL3Teliw==", + "dev": true, + "bin": { + "napi-postinstall": "lib/cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/napi-postinstall" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "node_modules/next": { + "version": "15.4.10", + "resolved": "https://registry.npmjs.org/next/-/next-15.4.10.tgz", + "integrity": "sha512-itVlc79QjpKMFMRhP+kbGKaSG/gZM6RCvwhEbwmCNF06CdDiNaoHcbeg0PqkEa2GOcn8KJ0nnc7+yL7EjoYLHQ==", + "license": "MIT", + "dependencies": { + "@next/env": "15.4.10", + "@swc/helpers": "0.5.15", + "caniuse-lite": "^1.0.30001579", + "postcss": "8.4.31", + "styled-jsx": "5.1.6" + }, + "bin": { + "next": "dist/bin/next" + }, + "engines": { + "node": "^18.18.0 || ^19.8.0 || >= 20.0.0" + }, + "optionalDependencies": { + "@next/swc-darwin-arm64": "15.4.8", + "@next/swc-darwin-x64": "15.4.8", + "@next/swc-linux-arm64-gnu": "15.4.8", + "@next/swc-linux-arm64-musl": "15.4.8", + "@next/swc-linux-x64-gnu": "15.4.8", + "@next/swc-linux-x64-musl": "15.4.8", + "@next/swc-win32-arm64-msvc": "15.4.8", + "@next/swc-win32-x64-msvc": "15.4.8", + "sharp": "^0.34.3" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.1.0", + "@playwright/test": "^1.51.1", + "babel-plugin-react-compiler": "*", + "react": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", + "react-dom": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", + "sass": "^1.3.0" + }, + "peerDependenciesMeta": { + "@opentelemetry/api": { + "optional": true + }, + "@playwright/test": { + "optional": true + }, + 
"babel-plugin-react-compiler": { + "optional": true + }, + "sass": { + "optional": true + } + } + }, + "node_modules/next-themes": { + "version": "0.4.6", + "resolved": "https://registry.npmjs.org/next-themes/-/next-themes-0.4.6.tgz", + "integrity": "sha512-pZvgD5L0IEvX5/9GWyHMf3m8BKiVQwsCMHfoFosXtXBMnaS0ZnIJ9ST4b4NqLVKDEm8QBxoNNGNaBv2JNF6XNA==", + "peerDependencies": { + "react": "^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc" + } + }, + "node_modules/next/node_modules/postcss": { + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "nanoid": "^3.3.6", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/nth-check": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + "dependencies": { + "boolbase": "^1.0.0" + }, + "funding": { + "url": "https://github.com/fb55/nth-check?sponsor=1" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": 
"sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", + "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0", + "has-symbols": "^1.1.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.entries": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.9.tgz", + "integrity": "sha512-8u/hfXFRBD1O0hPUjioLhoWFHRmt6tKA4/vZPyckBr18l1KE9uHrFaFaUi8MDRTpi4uak2goyPTSNJLXX2k2Hw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.fromentries": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", + "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" + }, + "engines": { + 
"node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.groupby": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", + "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.values": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.1.tgz", + "integrity": "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/own-keys": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", + "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.6", + "object-keys": "^1.1.1", + "safe-push-apply": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/package-manager-detector": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/package-manager-detector/-/package-manager-detector-1.6.0.tgz", + "integrity": "sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA==", + "license": "MIT" + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-entities": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.2.tgz", + "integrity": "sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw==", + "dependencies": { + "@types/unist": "^2.0.0", + "character-entities-legacy": "^3.0.0", + "character-reference-invalid": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "is-alphanumerical": "^2.0.0", + "is-decimal": "^2.0.0", + 
"is-hexadecimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/parse-entities/node_modules/@types/unist": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", + "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==" + }, + "node_modules/parse-numeric-range": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz", + "integrity": "sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ==" + }, + "node_modules/parse5": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", + "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", + "dependencies": { + "entities": "^6.0.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "node_modules/path-data-parser": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/path-data-parser/-/path-data-parser-0.1.0.tgz", + "integrity": "sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w==", + "license": "MIT" + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": 
"https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pkg-types": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz", + "integrity": "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==", + "license": "MIT", + "dependencies": { + "confbox": "^0.1.8", + "mlly": "^1.7.4", + "pathe": "^2.0.1" + } + }, + "node_modules/points-on-curve": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/points-on-curve/-/points-on-curve-0.2.0.tgz", + "integrity": "sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==", + "license": "MIT" + }, + "node_modules/points-on-path": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/points-on-path/-/points-on-path-0.2.1.tgz", + "integrity": "sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==", + "license": "MIT", + 
"dependencies": { + "path-data-parser": "0.1.0", + "points-on-curve": "0.2.0" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", + "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.0.10", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz", + "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/preact": { + "version": "10.28.1", + "resolved": "https://registry.npmjs.org/preact/-/preact-10.28.1.tgz", + "integrity": "sha512-u1/ixq/lVQI0CakKNvLDEcW5zfCjUQfZdK9qqWuIJtsezuyG6pk9TWj75GMuI/EzRSZB/VAE43sNWWZfiy8psw==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/preact" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + 
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prop-types": { + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "dependencies": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + "node_modules/property-information": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz", + "integrity": "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/react": { + 
"version": "19.1.0", + "resolved": "https://registry.npmjs.org/react/-/react-19.1.0.tgz", + "integrity": "sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "19.1.0", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.1.0.tgz", + "integrity": "sha512-Xs1hdnE+DyKgeHJeJznQmYMIBG3TKIHJJT95Q58nHLSrElKlGQqDTR2HQ9fx5CN/Gk6Vh/kupBTDLU11/nDk/g==", + "dependencies": { + "scheduler": "^0.26.0" + }, + "peerDependencies": { + "react": "^19.1.0" + } + }, + "node_modules/react-force-graph-2d": { + "version": "1.29.0", + "resolved": "https://registry.npmjs.org/react-force-graph-2d/-/react-force-graph-2d-1.29.0.tgz", + "integrity": "sha512-Xv5IIk+hsZmB3F2ibja/t6j/b0/1T9dtFOQacTUoLpgzRHrO6wPu1GtQ2LfRqI/imgtaapnXUgQaE8g8enPo5w==", + "license": "MIT", + "dependencies": { + "force-graph": "^1.51", + "prop-types": "15", + "react-kapsule": "^2.5" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "react": "*" + } + }, + "node_modules/react-hook-form": { + "version": "7.60.0", + "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.60.0.tgz", + "integrity": "sha512-SBrYOvMbDB7cV8ZfNpaiLcgjH/a1c7aK0lK+aNigpf4xWLO8q+o4tcvVurv3c4EOyzn/3dCsYt4GKD42VvJ/+A==", + "engines": { + "node": ">=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/react-hook-form" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17 || ^18 || ^19" + } + }, + "node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" + }, + "node_modules/react-kapsule": { + "version": "2.5.7", + "resolved": "https://registry.npmjs.org/react-kapsule/-/react-kapsule-2.5.7.tgz", + "integrity": 
"sha512-kifAF4ZPD77qZKc4CKLmozq6GY1sBzPEJTIJb0wWFK6HsePJatK3jXplZn2eeAt3x67CDozgi7/rO8fNQ/AL7A==", + "license": "MIT", + "dependencies": { + "jerrypick": "^1.1.1" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "react": ">=16.13.1" + } + }, + "node_modules/react-markdown": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-10.1.0.tgz", + "integrity": "sha512-qKxVopLT/TyA6BX3Ue5NwabOsAzm0Q7kAPwq6L+wWDwisYs7R8vZ0nRXqq6rkueboxpkjvLGU9fWifiX/ZZFxQ==", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "hast-util-to-jsx-runtime": "^2.0.0", + "html-url-attributes": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "remark-parse": "^11.0.0", + "remark-rehype": "^11.0.0", + "unified": "^11.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "@types/react": ">=18", + "react": ">=18" + } + }, + "node_modules/react-remove-scroll": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.7.1.tgz", + "integrity": "sha512-HpMh8+oahmIdOuS5aFKKY6Pyog+FNaZV/XyJOq7b4YFwsFHe5yYfdbIalI4k3vU2nSDql7YskmUseHsRrJqIPA==", + "dependencies": { + "react-remove-scroll-bar": "^2.3.7", + "react-style-singleton": "^2.2.3", + "tslib": "^2.1.0", + "use-callback-ref": "^1.3.3", + "use-sidecar": "^1.1.3" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-remove-scroll-bar": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.8.tgz", + "integrity": 
"sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q==", + "dependencies": { + "react-style-singleton": "^2.2.2", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-style-singleton": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.3.tgz", + "integrity": "sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==", + "dependencies": { + "get-nonce": "^1.0.0", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/reflect.getprototypeof": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", + "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.7", + "get-proto": "^1.0.1", + "which-builtin-type": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/refractor": { + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/refractor/-/refractor-4.9.0.tgz", + "integrity": "sha512-nEG1SPXFoGGx+dcjftjv8cAjEusIh6ED1xhf5DG3C0x/k+rmZ2duKnc3QLpt6qeHv5fPb8uwN3VWN2BT7fr3Og==", + "dependencies": { + "@types/hast": "^2.0.0", + "@types/prismjs": "^1.0.0", + "hastscript": "^7.0.0", + 
"parse-entities": "^4.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/refractor/node_modules/@types/hast": { + "version": "2.3.10", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.10.tgz", + "integrity": "sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==", + "dependencies": { + "@types/unist": "^2" + } + }, + "node_modules/refractor/node_modules/@types/unist": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", + "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==" + }, + "node_modules/refractor/node_modules/hast-util-parse-selector": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-3.1.1.tgz", + "integrity": "sha512-jdlwBjEexy1oGz0aJ2f4GKMaVKkA9jwjr4MjAAI22E5fM/TXVZHuS5OpONtdeIkRKqAaryQ2E9xNQxijoThSZA==", + "dependencies": { + "@types/hast": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/refractor/node_modules/hastscript": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-7.2.0.tgz", + "integrity": "sha512-TtYPq24IldU8iKoJQqvZOuhi5CyCQRAbvDOX0x1eW6rsHSxa/1i2CCiptNTotGHJ3VoHRGmqiv6/D3q113ikkw==", + "dependencies": { + "@types/hast": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^3.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/refractor/node_modules/property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": 
"sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/regexp.prototype.flags": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", + "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "set-function-name": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/rehype": { + "version": "13.0.2", + "resolved": "https://registry.npmjs.org/rehype/-/rehype-13.0.2.tgz", + "integrity": "sha512-j31mdaRFrwFRUIlxGeuPXXKWQxet52RBQRvCmzl5eCefn/KGbomK5GMHNMsOJf55fgo3qw5tST5neDuarDYR2A==", + "dependencies": { + "@types/hast": "^3.0.0", + "rehype-parse": "^9.0.0", + "rehype-stringify": "^10.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-attr": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/rehype-attr/-/rehype-attr-3.0.3.tgz", + "integrity": "sha512-Up50Xfra8tyxnkJdCzLBIBtxOcB2M1xdeKe1324U06RAvSjYm7ULSeoM+b/nYPQPVd7jsXJ9+39IG1WAJPXONw==", + "dependencies": { + "unified": "~11.0.0", + "unist-util-visit": "~5.0.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://jaywcjlove.github.io/#/sponsor" + } + }, + "node_modules/rehype-autolink-headings": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/rehype-autolink-headings/-/rehype-autolink-headings-7.1.0.tgz", + "integrity": "sha512-rItO/pSdvnvsP4QRB1pmPiNHUskikqtPojZKJPPPAVx9Hj8i8TwMBhofrrAYRhYOOBZH9tgmG5lPqDLuIWPWmw==", + "dependencies": { + 
"@types/hast": "^3.0.0", + "@ungap/structured-clone": "^1.0.0", + "hast-util-heading-rank": "^3.0.0", + "hast-util-is-element": "^3.0.0", + "unified": "^11.0.0", + "unist-util-visit": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-ignore": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/rehype-ignore/-/rehype-ignore-2.0.2.tgz", + "integrity": "sha512-BpAT/3lU9DMJ2siYVD/dSR0A/zQgD6Fb+fxkJd4j+wDVy6TYbYpK+FZqu8eM9EuNKGvi4BJR7XTZ/+zF02Dq8w==", + "dependencies": { + "hast-util-select": "^6.0.0", + "unified": "^11.0.0", + "unist-util-visit": "^5.0.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://jaywcjlove.github.io/#/sponsor" + } + }, + "node_modules/rehype-parse": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/rehype-parse/-/rehype-parse-9.0.1.tgz", + "integrity": "sha512-ksCzCD0Fgfh7trPDxr2rSylbwq9iYDkSn8TCDmEJ49ljEUBxDVCzCHv7QNzZOfODanX4+bWQ4WZqLCRWYLfhag==", + "dependencies": { + "@types/hast": "^3.0.0", + "hast-util-from-html": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-prism-plus": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/rehype-prism-plus/-/rehype-prism-plus-2.0.1.tgz", + "integrity": "sha512-Wglct0OW12tksTUseAPyWPo3srjBOY7xKlql/DPKi7HbsdZTyaLCAoO58QBKSczFQxElTsQlOY3JDOFzB/K++Q==", + "dependencies": { + "hast-util-to-string": "^3.0.0", + "parse-numeric-range": "^1.3.0", + "refractor": "^4.8.0", + "rehype-parse": "^9.0.0", + "unist-util-filter": "^5.0.0", + "unist-util-visit": "^5.0.0" + } + }, + "node_modules/rehype-raw": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-7.0.0.tgz", + "integrity": "sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww==", + "dependencies": { + "@types/hast": 
"^3.0.0", + "hast-util-raw": "^9.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-rewrite": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/rehype-rewrite/-/rehype-rewrite-4.0.2.tgz", + "integrity": "sha512-rjLJ3z6fIV11phwCqHp/KRo8xuUCO8o9bFJCNw5o6O2wlLk6g8r323aRswdGBQwfXPFYeSuZdAjp4tzo6RGqEg==", + "dependencies": { + "hast-util-select": "^6.0.0", + "unified": "^11.0.3", + "unist-util-visit": "^5.0.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "funding": { + "url": "https://jaywcjlove.github.io/#/sponsor" + } + }, + "node_modules/rehype-slug": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/rehype-slug/-/rehype-slug-6.0.0.tgz", + "integrity": "sha512-lWyvf/jwu+oS5+hL5eClVd3hNdmwM1kAC0BUvEGD19pajQMIzcNUd/k9GsfQ+FfECvX+JE+e9/btsKH0EjJT6A==", + "dependencies": { + "@types/hast": "^3.0.0", + "github-slugger": "^2.0.0", + "hast-util-heading-rank": "^3.0.0", + "hast-util-to-string": "^3.0.0", + "unist-util-visit": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-stringify": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/rehype-stringify/-/rehype-stringify-10.0.1.tgz", + "integrity": "sha512-k9ecfXHmIPuFVI61B9DeLPN0qFHfawM6RsuX48hoqlaKSF61RskNjSm1lI8PhBEM0MRdLxVVm4WmTqJQccH9mA==", + "dependencies": { + "@types/hast": "^3.0.0", + "hast-util-to-html": "^9.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-gfm": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.1.tgz", + "integrity": "sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-gfm": "^3.0.0", + 
"micromark-extension-gfm": "^3.0.0", + "remark-parse": "^11.0.0", + "remark-stringify": "^11.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-github-blockquote-alert": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/remark-github-blockquote-alert/-/remark-github-blockquote-alert-1.3.1.tgz", + "integrity": "sha512-OPNnimcKeozWN1w8KVQEuHOxgN3L4rah8geMOLhA5vN9wITqU4FWD+G26tkEsCGHiOVDbISx+Se5rGZ+D1p0Jg==", + "dependencies": { + "unist-util-visit": "^5.0.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://jaywcjlove.github.io/#/sponsor" + } + }, + "node_modules/remark-parse": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz", + "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype": { + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.2.tgz", + "integrity": "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "mdast-util-to-hast": "^13.0.0", + "unified": "^11.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-stringify": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz", + "integrity": "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==", + "dependencies": { + 
"@types/mdast": "^4.0.0", + "mdast-util-to-markdown": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "dev": true, + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/robust-predicates": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz", + "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==", + "license": 
"Unlicense" + }, + "node_modules/roughjs": { + "version": "4.6.6", + "resolved": "https://registry.npmjs.org/roughjs/-/roughjs-4.6.6.tgz", + "integrity": "sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ==", + "license": "MIT", + "dependencies": { + "hachure-fill": "^0.5.2", + "path-data-parser": "^0.1.0", + "points-on-curve": "^0.2.0", + "points-on-path": "^0.2.1" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rw": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", + "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==", + "license": "BSD-3-Clause" + }, + "node_modules/safe-array-concat": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", + "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "has-symbols": "^1.1.0", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-push-apply": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", + "integrity": 
"sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-regex-test": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", + "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-regex": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/scheduler": { + "version": "0.26.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.26.0.tgz", + "integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==" + }, + "node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "devOptional": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": 
"^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-function-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-proto": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", + "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", + "dev": true, + "dependencies": { + "dunder-proto": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/sharp": { + "version": "0.34.3", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.34.3.tgz", + "integrity": "sha512-eX2IQ6nFohW4DbvHIOLRB3MHFpYqaqvXd3Tp5e/T/dSH83fxaNJQRvDMhASmkNTsNTVF2/OOopzRCt7xokgPfg==", + "hasInstallScript": true, + "optional": true, + "dependencies": { + "color": "^4.2.3", + "detect-libc": "^2.0.4", + "semver": "^7.7.2" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-darwin-arm64": "0.34.3", + "@img/sharp-darwin-x64": "0.34.3", + "@img/sharp-libvips-darwin-arm64": "1.2.0", + "@img/sharp-libvips-darwin-x64": "1.2.0", + "@img/sharp-libvips-linux-arm": "1.2.0", + "@img/sharp-libvips-linux-arm64": "1.2.0", + "@img/sharp-libvips-linux-ppc64": "1.2.0", + "@img/sharp-libvips-linux-s390x": "1.2.0", + "@img/sharp-libvips-linux-x64": "1.2.0", + 
"@img/sharp-libvips-linuxmusl-arm64": "1.2.0", + "@img/sharp-libvips-linuxmusl-x64": "1.2.0", + "@img/sharp-linux-arm": "0.34.3", + "@img/sharp-linux-arm64": "0.34.3", + "@img/sharp-linux-ppc64": "0.34.3", + "@img/sharp-linux-s390x": "0.34.3", + "@img/sharp-linux-x64": "0.34.3", + "@img/sharp-linuxmusl-arm64": "0.34.3", + "@img/sharp-linuxmusl-x64": "0.34.3", + "@img/sharp-wasm32": "0.34.3", + "@img/sharp-win32-arm64": "0.34.3", + "@img/sharp-win32-ia32": "0.34.3", + "@img/sharp-win32-x64": "0.34.3" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": 
"sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", + "optional": true, + "dependencies": { + "is-arrayish": "^0.3.1" + } + }, + "node_modules/sonner": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/sonner/-/sonner-2.0.6.tgz", + "integrity": "sha512-yHFhk8T/DK3YxjFQXIrcHT1rGEeTLliVzWbO0xN8GberVun2RiBnxAjXAYpZrqwEVHBG9asI/Li8TAAhN9m59Q==", + "peerDependencies": { + "react": "^18.0.0 
|| ^19.0.0 || ^19.0.0-rc", + "react-dom": "^18.0.0 || ^19.0.0 || ^19.0.0-rc" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/stable-hash": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/stable-hash/-/stable-hash-0.0.5.tgz", + "integrity": "sha512-+L3ccpzibovGXFK+Ap/f8LOS0ahMrHTf3xu7mMLSpEGU0EO9ucaysSylKo9eRDFNhWve/y275iPmIZ4z39a9iA==", + "dev": true + }, + "node_modules/state-local": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/state-local/-/state-local-1.0.7.tgz", + "integrity": "sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==" + }, + "node_modules/stop-iteration-iterator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", + "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "internal-slot": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/string.prototype.includes": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/string.prototype.includes/-/string.prototype.includes-2.0.1.tgz", + "integrity": "sha512-o7+c9bW6zpAdJHTtujeePODAhkuicdAryFsfVKwA+wGw89wJ4GTY484WTucM9hLtDEOpOvI+aHnzqnC5lHp4Rg==", + "dev": true, 
+ "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.3" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/string.prototype.matchall": { + "version": "4.0.12", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.12.tgz", + "integrity": "sha512-6CC9uyBL+/48dYizRf7H7VAYCMCNTBeM78x/VTUe9bFEaxBepPJDa1Ow99LqI/1yF7kuy7Q3cQsYMrcjGUcskA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.6", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.6", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "internal-slot": "^1.1.0", + "regexp.prototype.flags": "^1.5.3", + "set-function-name": "^2.0.2", + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.repeat": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/string.prototype.repeat/-/string.prototype.repeat-1.0.0.tgz", + "integrity": "sha512-0u/TldDbKD8bFCQ/4f5+mNRrXwZ8hg2w7ZR8wa16e8z9XpePWl3eGEcUD0OXpEH/VJH/2G3gjUtR3ZOiBe2S/w==", + "dev": true, + "dependencies": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, + "node_modules/string.prototype.trim": { + "version": "1.2.10", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", + "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-data-property": "^1.1.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-object-atoms": "^1.0.0", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, 
+ "node_modules/string.prototype.trimend": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", + "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/stringify-entities": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", + "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", + "dependencies": { + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^3.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", 
+ "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/style-to-js": { + "version": "1.1.17", + "resolved": "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.17.tgz", + "integrity": "sha512-xQcBGDxJb6jjFCTzvQtfiPn6YvvP2O8U1MDIPNfJQlWMYfktPy+iGsHE7cssjs7y84d9fQaK4UF3RIJaAHSoYA==", + "dependencies": { + "style-to-object": "1.0.9" + } + }, + "node_modules/style-to-object": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.9.tgz", + "integrity": "sha512-G4qppLgKu/k6FwRpHiGiKPaPTFcG3g4wNVX/Qsfu+RqQM30E7Tyu/TEgxcL9PNLF5pdRLwQdE3YKKf+KF2Dzlw==", + "dependencies": { + "inline-style-parser": "0.2.4" + } + }, + "node_modules/styled-jsx": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.1.6.tgz", + "integrity": "sha512-qSVyDTeMotdvQYoHWLNGwRFJHC+i+ZvdBRYosOFgC+Wg1vx4frN2/RG/NA7SYqqvKNLf39P2LSRA2pu6n0XYZA==", + "dependencies": { + "client-only": "0.0.1" + }, + "engines": { + "node": ">= 12.0.0" + }, + "peerDependencies": { + "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0 || ^19.0.0-0" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/stylis": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.6.tgz", + "integrity": "sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==", + "license": "MIT" + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + 
}, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tailwind-merge": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-3.3.1.tgz", + "integrity": "sha512-gBXpgUm/3rp1lMZZrM/w7D8GKqshif0zAymAhbCyIt8KMe+0v9DQ7cdYLR4FHH/cKpdTXb+A/tKKU3eolfsI+g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/dcastil" + } + }, + "node_modules/tailwindcss": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.11.tgz", + "integrity": "sha512-2E9TBm6MDD/xKYe+dvJZAmg3yxIEDNRc0jwlNyDg/4Fil2QcSLjFKGVff0lAf1jjeaArlG/M75Ey/EYr/OJtBA==" + }, + "node_modules/tapable": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.2.tgz", + "integrity": "sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/tar": { + "version": "7.4.3", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", + "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", + "dev": true, + "dependencies": { + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.0.1", + "mkdirp": "^3.0.1", + "yallist": "^5.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/tinycolor2": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/tinycolor2/-/tinycolor2-1.6.0.tgz", + "integrity": 
"sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw==", + "license": "MIT" + }, + "node_modules/tinyexec": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz", + "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.14", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.14.tgz", + "integrity": "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==", + "dev": true, + "dependencies": { + "fdir": "^6.4.4", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.4.6", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz", + "integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==", + "dev": true, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": 
{ + "node": ">=8.0" + } + }, + "node_modules/trim-lines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", + "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/trough": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz", + "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/ts-api-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", + "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", + "dev": true, + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" + } + }, + "node_modules/ts-dedent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz", + "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==", + "license": "MIT", + "engines": { + "node": ">=6.10" + } + }, + "node_modules/tsconfig-paths": { + "version": "3.15.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", + "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", + "dev": true, + "dependencies": { + "@types/json5": "^0.0.29", + "json5": "^1.0.2", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==" + }, + 
"node_modules/tw-animate-css": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/tw-animate-css/-/tw-animate-css-1.3.5.tgz", + "integrity": "sha512-t3u+0YNoloIhj1mMXs779P6MO9q3p3mvGn4k1n3nJPqJw/glZcuijG2qTSN4z4mgNRfW5ZC3aXJFLwDtiipZXA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/Wombosvideo" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/typed-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", + "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/typed-array-byte-length": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", + "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-byte-offset": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", + "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", + "dev": true, + 
"dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.15", + "reflect.getprototypeof": "^1.0.9" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-length": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", + "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0", + "reflect.getprototypeof": "^1.0.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typescript": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/ufo": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.3.tgz", + "integrity": "sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==", + "license": "MIT" + }, + "node_modules/unbox-primitive": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", + "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.3", + "has-bigints": "^1.0.2", + "has-symbols": "^1.1.0", + "which-boxed-primitive": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + 
}, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true + }, + "node_modules/unified": { + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz", + "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==", + "dependencies": { + "@types/unist": "^3.0.0", + "bail": "^2.0.0", + "devlop": "^1.0.0", + "extend": "^3.0.0", + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-filter": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/unist-util-filter/-/unist-util-filter-5.0.1.tgz", + "integrity": "sha512-pHx7D4Zt6+TsfwylH9+lYhBhzyhEnCXs/lbq/Hstxno5z4gVdyc2WEW0asfjGKPyG4pEKrnBv5hdkO6+aRnQJw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + } + }, + "node_modules/unist-util-is": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz", + "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": 
{ + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", + "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit-parents": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz", + "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unrs-resolver": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/unrs-resolver/-/unrs-resolver-1.11.1.tgz", + "integrity": "sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "napi-postinstall": "^0.3.0" + }, + "funding": { + "url": "https://opencollective.com/unrs-resolver" + }, + "optionalDependencies": { + "@unrs/resolver-binding-android-arm-eabi": "1.11.1", + 
"@unrs/resolver-binding-android-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-x64": "1.11.1", + "@unrs/resolver-binding-freebsd-x64": "1.11.1", + "@unrs/resolver-binding-linux-arm-gnueabihf": "1.11.1", + "@unrs/resolver-binding-linux-arm-musleabihf": "1.11.1", + "@unrs/resolver-binding-linux-arm64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-arm64-musl": "1.11.1", + "@unrs/resolver-binding-linux-ppc64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-musl": "1.11.1", + "@unrs/resolver-binding-linux-s390x-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-musl": "1.11.1", + "@unrs/resolver-binding-wasm32-wasi": "1.11.1", + "@unrs/resolver-binding-win32-arm64-msvc": "1.11.1", + "@unrs/resolver-binding-win32-ia32-msvc": "1.11.1", + "@unrs/resolver-binding-win32-x64-msvc": "1.11.1" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/use-callback-ref": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.3.tgz", + "integrity": "sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==", + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/use-debounce": { + "version": "10.0.6", + "resolved": "https://registry.npmjs.org/use-debounce/-/use-debounce-10.0.6.tgz", + "integrity": 
"sha512-C5OtPyhAZgVoteO9heXMTdW7v/IbFI+8bSVKYCJrSmiWWCLsbUxiBSp4t9v0hNBTGY97bT72ydDIDyGSFWfwXg==", + "engines": { + "node": ">= 16.0.0" + }, + "peerDependencies": { + "react": "*" + } + }, + "node_modules/use-sidecar": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.3.tgz", + "integrity": "sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==", + "dependencies": { + "detect-node-es": "^1.1.0", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "node_modules/uuid": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz", + "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/esm/bin/uuid" + } + }, + "node_modules/vfile": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz", + "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==", + "dependencies": { + "@types/unist": "^3.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-location": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-5.0.3.tgz", + "integrity": 
"sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==", + "dependencies": { + "@types/unist": "^3.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vscode-jsonrpc": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz", + "integrity": "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/vscode-languageserver": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-9.0.1.tgz", + "integrity": "sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==", + "license": "MIT", + "dependencies": { + "vscode-languageserver-protocol": "3.17.5" + }, + "bin": { + "installServerIntoExtension": "bin/installServerIntoExtension" + } + }, + "node_modules/vscode-languageserver-protocol": { + "version": "3.17.5", + "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz", + "integrity": "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==", + "license": "MIT", + "dependencies": { + "vscode-jsonrpc": "8.2.0", + "vscode-languageserver-types": "3.17.5" + } + }, + "node_modules/vscode-languageserver-textdocument": { + "version": "1.0.12", 
+ "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz", + "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==", + "license": "MIT" + }, + "node_modules/vscode-languageserver-types": { + "version": "3.17.5", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", + "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==", + "license": "MIT" + }, + "node_modules/vscode-uri": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.8.tgz", + "integrity": "sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==", + "license": "MIT" + }, + "node_modules/web-namespaces": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz", + "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-boxed-primitive": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", + "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", + "dev": true, + "dependencies": { + "is-bigint": "^1.1.0", + "is-boolean-object": "^1.2.1", + "is-number-object": "^1.1.1", + 
"is-string": "^1.1.1", + "is-symbol": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-builtin-type": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", + "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", + "dev": true, + "dependencies": { + "call-bound": "^1.0.2", + "function.prototype.name": "^1.1.6", + "has-tostringtag": "^1.0.2", + "is-async-function": "^2.0.0", + "is-date-object": "^1.1.0", + "is-finalizationregistry": "^1.1.0", + "is-generator-function": "^1.0.10", + "is-regex": "^1.2.1", + "is-weakref": "^1.0.2", + "isarray": "^2.0.5", + "which-boxed-primitive": "^1.1.0", + "which-collection": "^1.0.2", + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-collection": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", + "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", + "dev": true, + "dependencies": { + "is-map": "^2.0.3", + "is-set": "^2.0.3", + "is-weakmap": "^2.0.2", + "is-weakset": "^2.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.19", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", + "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", + "dev": true, + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2" + }, + 
"engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/yallist": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", + "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", + "dev": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zod": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/zod/-/zod-4.0.5.tgz", + "integrity": "sha512-/5UuuRPStvHXu7RS+gmvRf4NXrNxpSllGwDnCBcJZtQsKrviYXm54yDGV2KYNLT5kq0lHGcl7lqWJLgSaG+tgA==", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zustand": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/zustand/-/zustand-5.0.6.tgz", + "integrity": "sha512-ihAqNeUVhe0MAD+X8M5UzqyZ9k3FFZLBTtqo6JLPwV53cbRB/mJwBI0PxcIgqhBBHlEs8G45OTDTMq3gNcLq3A==", + "engines": { + "node": ">=12.20.0" + }, + "peerDependencies": { + "@types/react": ">=18.0.0", + "immer": ">=9.0.6", + "react": ">=18.0.0", + "use-sync-external-store": ">=1.2.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "immer": { + "optional": true + }, + "react": { + "optional": true + }, + "use-sync-external-store": { + "optional": true + } + } 
+ }, + "node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + } + } +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000000000000000000000000000000000000..8179e17d2615bc234d54b0c8268ef652c716d2ca --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,68 @@ +{ + "name": "frontend", + "version": "0.1.0", + "author": "Prem Kumar", + "private": true, + "scripts": { + "dev": "next dev", + "build": "next build", + "start": "next start -H 0.0.0.0", + "lint": "next lint" + }, + "dependencies": { + "@hookform/resolvers": "^5.1.1", + "@monaco-editor/react": "^4.7.0", + "@radix-ui/react-accordion": "^1.2.12", + "@radix-ui/react-alert-dialog": "^1.1.14", + "@radix-ui/react-checkbox": "^1.3.2", + "@radix-ui/react-collapsible": "^1.1.11", + "@radix-ui/react-dialog": "^1.1.15", + "@radix-ui/react-dropdown-menu": "^2.1.15", + "@radix-ui/react-label": "^2.1.7", + "@radix-ui/react-popover": "^1.1.15", + "@radix-ui/react-progress": "^1.1.7", + "@radix-ui/react-radio-group": "^1.3.8", + "@radix-ui/react-scroll-area": "^1.2.9", + "@radix-ui/react-select": "^2.2.5", + "@radix-ui/react-separator": "^1.1.7", + "@radix-ui/react-slot": "^1.2.3", + "@radix-ui/react-tabs": "^1.1.12", + "@radix-ui/react-tooltip": "^1.2.7", + "@tailwindcss/typography": "^0.5.16", + "@tanstack/react-query": "^5.83.0", + "@uiw/react-md-editor": "^4.0.8", + "axios": "^1.12.0", + "class-variance-authority": "^0.7.1", + "clsx": "^2.1.1", + "cmdk": "^1.1.1", + "d3-force": "^3.0.0", + "date-fns": "^4.1.0", + "lucide-react": "^0.525.0", + "mermaid": "^11.12.2", + "next": "15.4.10", + "next-themes": "^0.4.6", + "react": "19.1.0", + "react-dom": "19.1.0", + "react-force-graph-2d": "^1.29.0", + "react-hook-form": 
"^7.60.0", + "react-markdown": "^10.1.0", + "remark-gfm": "^4.0.1", + "sonner": "^2.0.6", + "tailwind-merge": "^3.3.1", + "use-debounce": "^10.0.6", + "zod": "^4.0.5", + "zustand": "^5.0.6" + }, + "devDependencies": { + "@eslint/eslintrc": "^3", + "@tailwindcss/postcss": "^4", + "@types/node": "^20", + "@types/react": "^19", + "@types/react-dom": "^19", + "eslint": "^9", + "eslint-config-next": "15.4.2", + "tailwindcss": "^4", + "tw-animate-css": "^1.3.5", + "typescript": "^5" + } +} \ No newline at end of file diff --git a/frontend/postcss.config.mjs b/frontend/postcss.config.mjs new file mode 100644 index 0000000000000000000000000000000000000000..c7bcb4b1ee14cd5e25078c2c934529afdd2a7df9 --- /dev/null +++ b/frontend/postcss.config.mjs @@ -0,0 +1,5 @@ +const config = { + plugins: ["@tailwindcss/postcss"], +}; + +export default config; diff --git a/frontend/public/file.svg b/frontend/public/file.svg new file mode 100644 index 0000000000000000000000000000000000000000..004145cddf3f9db91b57b9cb596683c8eb420862 --- /dev/null +++ b/frontend/public/file.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/globe.svg b/frontend/public/globe.svg new file mode 100644 index 0000000000000000000000000000000000000000..567f17b0d7c7fb662c16d4357dd74830caf2dccb --- /dev/null +++ b/frontend/public/globe.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/logo.svg b/frontend/public/logo.svg new file mode 100644 index 0000000000000000000000000000000000000000..87013478bad321ad030ec6d0871309c51dd1d99c --- /dev/null +++ b/frontend/public/logo.svg @@ -0,0 +1,60 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/frontend/public/next.svg b/frontend/public/next.svg new file mode 100644 index 0000000000000000000000000000000000000000..5174b28c565c285e3e312ec5178be64fbeca8398 --- /dev/null +++ b/frontend/public/next.svg @@ -0,0 +1 @@ + \ No newline at end of file 
diff --git a/frontend/public/vercel.svg b/frontend/public/vercel.svg new file mode 100644 index 0000000000000000000000000000000000000000..77053960334e2e34dc584dea8019925c3b4ccca9 --- /dev/null +++ b/frontend/public/vercel.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/window.svg b/frontend/public/window.svg new file mode 100644 index 0000000000000000000000000000000000000000..b2b2a44f6ebc70c450043c05a002e7a93ba5d651 --- /dev/null +++ b/frontend/public/window.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/app/(auth)/login/page.tsx b/frontend/src/app/(auth)/login/page.tsx new file mode 100644 index 0000000000000000000000000000000000000000..ed77e290c3c6b4c5b08f3e15fcde49c9de9a7ac9 --- /dev/null +++ b/frontend/src/app/(auth)/login/page.tsx @@ -0,0 +1,10 @@ +import { LoginForm } from '@/components/auth/LoginForm' +import { ErrorBoundary } from '@/components/common/ErrorBoundary' + +export default function LoginPage() { + return ( + + + + ) +} \ No newline at end of file diff --git a/frontend/src/app/(dashboard)/advanced/components/RebuildEmbeddings.tsx b/frontend/src/app/(dashboard)/advanced/components/RebuildEmbeddings.tsx new file mode 100644 index 0000000000000000000000000000000000000000..3222754688a26b966717440ebbaf8b9b6b39e432 --- /dev/null +++ b/frontend/src/app/(dashboard)/advanced/components/RebuildEmbeddings.tsx @@ -0,0 +1,362 @@ +'use client' + +import { useState, useEffect, useCallback } from 'react' +import { useMutation } from '@tanstack/react-query' +import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card' +import { Button } from '@/components/ui/button' +import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select' +import { Checkbox } from '@/components/ui/checkbox' +import { Label } from '@/components/ui/label' +import { Alert, AlertDescription } from '@/components/ui/alert' +import { Progress } from 
'@/components/ui/progress' +import { Loader2, AlertCircle, CheckCircle2, XCircle, Clock } from 'lucide-react' +import { + Accordion, + AccordionContent, + AccordionItem, + AccordionTrigger, +} from '@/components/ui/accordion' +import { embeddingApi } from '@/lib/api/embedding' +import type { RebuildEmbeddingsRequest, RebuildStatusResponse } from '@/lib/api/embedding' + +export function RebuildEmbeddings() { + const [mode, setMode] = useState<'existing' | 'all'>('existing') + const [includeSources, setIncludeSources] = useState(true) + const [includeNotes, setIncludeNotes] = useState(true) + const [includeInsights, setIncludeInsights] = useState(true) + const [commandId, setCommandId] = useState(null) + const [status, setStatus] = useState(null) + const [pollingInterval, setPollingInterval] = useState(null) + + // Rebuild mutation + const rebuildMutation = useMutation({ + mutationFn: async (request: RebuildEmbeddingsRequest) => { + return embeddingApi.rebuildEmbeddings(request) + }, + onSuccess: (data) => { + setCommandId(data.command_id) + // Start polling for status + startPolling(data.command_id) + } + }) + + // Start polling for rebuild status + const startPolling = (cmdId: string) => { + if (pollingInterval) { + clearInterval(pollingInterval) + } + + const interval = setInterval(async () => { + try { + const statusData = await embeddingApi.getRebuildStatus(cmdId) + setStatus(statusData) + + // Stop polling if completed or failed + if (statusData.status === 'completed' || statusData.status === 'failed') { + stopPolling() + } + } catch (error) { + console.error('Failed to fetch rebuild status:', error) + } + }, 5000) // Poll every 5 seconds + + setPollingInterval(interval) + } + + // Stop polling + const stopPolling = useCallback(() => { + if (pollingInterval) { + clearInterval(pollingInterval) + setPollingInterval(null) + } + }, [pollingInterval]) + + // Cleanup on unmount + useEffect(() => { + return () => { + stopPolling() + } + }, [stopPolling]) + + const 
handleStartRebuild = () => { + const request: RebuildEmbeddingsRequest = { + mode, + include_sources: includeSources, + include_notes: includeNotes, + include_insights: includeInsights + } + + rebuildMutation.mutate(request) + } + + const handleReset = () => { + stopPolling() + setCommandId(null) + setStatus(null) + rebuildMutation.reset() + } + + const isAnyTypeSelected = includeSources || includeNotes || includeInsights + const isRebuildActive = commandId && status && (status.status === 'queued' || status.status === 'running') + + const progressData = status?.progress + const stats = status?.stats + + const totalItems = progressData?.total_items ?? progressData?.total ?? 0 + const processedItems = progressData?.processed_items ?? progressData?.processed ?? 0 + const derivedProgressPercent = progressData?.percentage ?? (totalItems > 0 ? (processedItems / totalItems) * 100 : 0) + const progressPercent = Number.isFinite(derivedProgressPercent) ? derivedProgressPercent : 0 + + const sourcesProcessed = stats?.sources_processed ?? stats?.sources ?? 0 + const notesProcessed = stats?.notes_processed ?? stats?.notes ?? 0 + const insightsProcessed = stats?.insights_processed ?? stats?.insights ?? 0 + const failedItems = stats?.failed_items ?? stats?.failed ?? 0 + + const computedDuration = status?.started_at && status?.completed_at + ? (new Date(status.completed_at).getTime() - new Date(status.started_at).getTime()) / 1000 + : undefined + const processingTimeSeconds = stats?.processing_time ?? computedDuration + + return ( + + + + πŸ”„ Rebuild Embeddings + + + Rebuild vector embeddings for your content. Use this when switching embedding models or fixing corrupted embeddings. + + + + {/* Configuration Form */} + {!isRebuildActive && ( +
+
+ + +

+ {mode === 'existing' + ? 'Re-embed only items that already have embeddings (faster, for model switching)' + : 'Re-embed existing items + create embeddings for items without any (slower, comprehensive)'} +

+
+ +
+ +
+
+ setIncludeSources(checked === true)} + /> + +
+
+ setIncludeNotes(checked === true)} + /> + +
+
+ setIncludeInsights(checked === true)} + /> + +
+
+ {!isAnyTypeSelected && ( + + + + Please select at least one item type to rebuild + + + )} +
+ + + + {rebuildMutation.isError && ( + + + + Failed to start rebuild: {(rebuildMutation.error as Error)?.message || 'Unknown error'} + + + )} +
+ )} + + {/* Status Display */} + {status && ( +
+
+
+ {status.status === 'queued' && } + {status.status === 'running' && } + {status.status === 'completed' && } + {status.status === 'failed' && } +
+ + {status.status === 'queued' && 'Queued'} + {status.status === 'running' && 'Running...'} + {status.status === 'completed' && 'Completed!'} + {status.status === 'failed' && 'Failed'} + + {status.status === 'running' && ( + + You can leave this page as this will run in the background + + )} +
+
+ {(status.status === 'completed' || status.status === 'failed') && ( + + )} +
+ + {progressData && ( +
+
+ Progress + + {processedItems}/{totalItems} items ({progressPercent.toFixed(1)}%) + +
+ + {failedItems > 0 && ( +

+ ⚠️ {failedItems} items failed to process +

+ )} +
+ )} + + {stats && ( +
+
+

Sources

+

{sourcesProcessed}

+
+
+

Notes

+

{notesProcessed}

+
+
+

Insights

+

{insightsProcessed}

+
+
+

Time

+

+ {processingTimeSeconds !== undefined ? `${processingTimeSeconds.toFixed(1)}s` : 'β€”'} +

+
+
+ )} + + {status.error_message && ( + + + {status.error_message} + + )} + + {status.started_at && ( +
+

Started: {new Date(status.started_at).toLocaleString()}

+ {status.completed_at && ( +

Completed: {new Date(status.completed_at).toLocaleString()}

+ )} +
+ )} +
+ )} + + {/* Help Section */} + + + When should I rebuild embeddings? + +

You should rebuild embeddings when:

+
    +
  • Switching embedding models: If you change from one embedding model to another, you need to rebuild all embeddings to ensure consistency.
  • +
  • Upgrading model versions: When updating to a newer version of your embedding model, rebuild to take advantage of improvements.
  • +
  • Fixing corrupted embeddings: If you suspect some embeddings are corrupted or missing, rebuilding can restore them.
  • +
  • After bulk imports: If you imported content without embeddings, use "All" mode to embed everything.
  • +
+
+
+ + + How long does rebuilding take? + +

Processing time depends on:

+
    +
  • Number of items to process
  • +
  • Embedding model speed
  • +
  • API rate limits (for cloud providers)
  • +
  • System resources
  • +
+

Typical rates:

+
    +
  • Local models (Ollama): Very fast, limited only by hardware
  • +
  • Cloud APIs (OpenAI, Google): Moderate speed, may hit rate limits with large datasets
  • +
  • Sources: Slower than notes/insights (creates multiple chunks per source)
  • +
+

Example: Rebuilding 200 items might take 2-5 minutes with cloud APIs, or under 1 minute with local models.

+
+
+ + + Is it safe to rebuild while using the app? + +

Yes, rebuilding is safe! The rebuild process:

+
    +
  • βœ… Is idempotent: Running multiple times produces the same result
  • +
  • βœ… Doesn't delete content: Only replaces embeddings
  • +
  • βœ… Can be run anytime: No need to stop other operations
  • +
  • βœ… Handles errors gracefully: Failed items are logged and skipped
  • +
+

⚠️ However: Very large rebuilds (1000s of items) may temporarily slow down searches while processing.

+
+
+
+
+
+ ) +} diff --git a/frontend/src/app/(dashboard)/advanced/components/SystemInfo.tsx b/frontend/src/app/(dashboard)/advanced/components/SystemInfo.tsx new file mode 100644 index 0000000000000000000000000000000000000000..7916eaec9693206e3b430b163e4b5e6f1ddd2573 --- /dev/null +++ b/frontend/src/app/(dashboard)/advanced/components/SystemInfo.tsx @@ -0,0 +1,117 @@ +'use client' + +import { useEffect, useState } from 'react' +import { Card } from '@/components/ui/card' +import { getConfig } from '@/lib/config' +import { Badge } from '@/components/ui/badge' + +export function SystemInfo() { + const [config, setConfig] = useState<{ + version: string + latestVersion?: string | null + hasUpdate?: boolean + } | null>(null) + const [isLoading, setIsLoading] = useState(true) + + useEffect(() => { + const loadConfig = async () => { + try { + const cfg = await getConfig() + setConfig(cfg) + } catch (error) { + console.error('Failed to load config:', error) + } finally { + setIsLoading(false) + } + } + + loadConfig() + }, []) + + if (isLoading) { + return ( + +
+

System Information

+
Loading...
+
+
+ ) + } + + return ( + +
+

System Information

+ +
+ {/* Current Version */} +
+ Current Version + {config?.version || 'Unknown'} +
+ + {/* Latest Version */} + {config?.latestVersion && ( +
+ Latest Version + {config.latestVersion} +
+ )} + + {/* Update Status */} +
+ Status + {config?.hasUpdate ? ( + + Update Available + + ) : config?.latestVersion ? ( + + Up to Date + + ) : ( + + Unknown + + )} +
+ + {/* GitHub Repository Link */} + {config?.hasUpdate && ( + + )} + + {/* Version Check Failed Message */} + {!config?.latestVersion && config?.version && ( +
+ Unable to check for updates. GitHub may be unreachable. +
+ )} +
+
+
+ ) +} diff --git a/frontend/src/app/(dashboard)/advanced/page.tsx b/frontend/src/app/(dashboard)/advanced/page.tsx new file mode 100644 index 0000000000000000000000000000000000000000..71f56ea6e7dd9e659eec1b26c6ec62bf12e7bc25 --- /dev/null +++ b/frontend/src/app/(dashboard)/advanced/page.tsx @@ -0,0 +1,27 @@ +'use client' + +import { AppShell } from '@/components/layout/AppShell' +import { RebuildEmbeddings } from './components/RebuildEmbeddings' +import { SystemInfo } from './components/SystemInfo' + +export default function AdvancedPage() { + return ( + +
+
+
+
+

Advanced

+

+ Advanced tools and utilities for power users +

+
+ + + +
+
+
+
+ ) +} diff --git a/frontend/src/app/(dashboard)/knowledge-graph/components/GraphControls.tsx b/frontend/src/app/(dashboard)/knowledge-graph/components/GraphControls.tsx new file mode 100644 index 0000000000000000000000000000000000000000..b833ac483f349f39e38d8925ffc45ecf203e94b1 --- /dev/null +++ b/frontend/src/app/(dashboard)/knowledge-graph/components/GraphControls.tsx @@ -0,0 +1,164 @@ +'use client' + +import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card' +import { Button } from '@/components/ui/button' +import { Badge } from '@/components/ui/badge' +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from '@/components/ui/select' +import { + useKnowledgeGraphStatus, + useBuildKnowledgeGraph, + useDeleteKnowledgeGraph +} from '@/lib/hooks/use-knowledge-graph' +import { LoadingSpinner } from '@/components/common/LoadingSpinner' +import { + RefreshCw, + Trash2, + Clock, + CheckCircle2, + AlertCircle, + Loader2, + Network +} from 'lucide-react' +import { formatDistanceToNow } from 'date-fns' +import type { NotebookResponse } from '@/lib/types/api' +import type { BuildStatus } from '@/lib/types/knowledge-graph' + +interface GraphControlsProps { + notebooks: NotebookResponse[] + selectedNotebookId: string | null + onSelectNotebook: (id: string | null) => void +} + +const statusConfig: Record = { + not_built: { label: 'Not Built', icon: , color: 'bg-gray-500' }, + pending: { label: 'Pending', icon: , color: 'bg-yellow-500' }, + building: { label: 'Building', icon: , color: 'bg-blue-500' }, + completed: { label: 'Completed', icon: , color: 'bg-green-500' }, + error: { label: 'Error', icon: , color: 'bg-red-500' }, +} + +export function GraphControls({ notebooks, selectedNotebookId, onSelectNotebook }: GraphControlsProps) { + const { data: status } = useKnowledgeGraphStatus(selectedNotebookId) + const buildGraph = useBuildKnowledgeGraph() + const deleteGraph = useDeleteKnowledgeGraph() + + const handleBuild = () 
=> { + if (selectedNotebookId) { + buildGraph.mutate({ notebook_id: selectedNotebookId }) + } + } + + const handleDelete = () => { + if (selectedNotebookId) { + deleteGraph.mutate(selectedNotebookId) + } + } + + const currentStatus = status?.build_status || 'not_built' + const statusInfo = statusConfig[currentStatus] + + return ( + + + Graph Settings + + +
+ {/* Notebook Selector */} +
+ + +
+ + {/* Status */} + {selectedNotebookId && ( +
+
+ + + {statusInfo.icon} + {statusInfo.label} + +
+ + {status && status.node_count > 0 && ( +
+
{status.node_count} nodes
+
{status.edge_count} edges
+
+ )} + + {status?.last_built && ( +
+ Last built: {formatDistanceToNow(new Date(status.last_built), { addSuffix: true })} +
+ )} +
+ )} +
+ + {/* Actions */} + {selectedNotebookId && ( +
+ + + {status && status.node_count > 0 && ( + + )} +
+ )} + + {/* Error Message */} + {status?.error_message && ( +
+ {status.error_message} +
+ )} +
+
+ ) +} diff --git a/frontend/src/app/(dashboard)/knowledge-graph/components/KnowledgeGraphInsightsPanel.tsx b/frontend/src/app/(dashboard)/knowledge-graph/components/KnowledgeGraphInsightsPanel.tsx new file mode 100644 index 0000000000000000000000000000000000000000..51d7d2242c917d2d98d47d96bd67208dde4bea77 --- /dev/null +++ b/frontend/src/app/(dashboard)/knowledge-graph/components/KnowledgeGraphInsightsPanel.tsx @@ -0,0 +1,375 @@ +'use client' + +import { useState } from 'react' +import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card' +import { Button } from '@/components/ui/button' +import { Textarea } from '@/components/ui/textarea' +import { Badge } from '@/components/ui/badge' +import { ScrollArea } from '@/components/ui/scroll-area' +import { Separator } from '@/components/ui/separator' +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from '@/components/ui/select' +import { useAsk } from '@/lib/hooks/use-ask' +import { useModels } from '@/lib/hooks/use-models' +import { LoadingSpinner } from '@/components/common/LoadingSpinner' +import { + Sparkles, + Brain, + Lightbulb, + Network +} from 'lucide-react' +import ReactMarkdown from 'react-markdown' + +interface KnowledgeGraphInsightsPanelProps { + notebookId: string + graphStats: { + nodeCount: number + edgeCount: number + topConcepts?: Array<{ + label: string + mentions: number + type?: string + importance?: number + }> + nodeTypes?: Record + relationshipTypes?: Record + allNodes?: Array<{ + label: string + type: string + mentions: number + }> + } +} + +export function KnowledgeGraphInsightsPanel({ + notebookId, + graphStats +}: KnowledgeGraphInsightsPanelProps) { + const [question, setQuestion] = useState('') + const [selectedStrategyModel, setSelectedStrategyModel] = useState('') + const [selectedAnswerModel, setSelectedAnswerModel] = useState('') + const [selectedFinalModel, setSelectedFinalModel] = useState('') + + const askHook = useAsk() + const 
askState = { + isStreaming: askHook.isStreaming, + strategy: askHook.strategy, + answers: askHook.answers, + finalAnswer: askHook.finalAnswer, + error: askHook.error + } + const { sendAsk, sendDirectAsk } = askHook + const { data: models, isLoading: modelsLoading } = useModels() + + const handleAsk = async () => { + if (!question.trim()) return + + // Build context from knowledge graph + const graphContext = buildGraphContext() + const contextualQuestion = `${graphContext}\n\nQuestion: ${question}` + + if (!selectedStrategyModel || !selectedAnswerModel || !selectedFinalModel) { + // Use first available model as default + const defaultModel = models?.[0]?.id + await sendAsk(contextualQuestion, { + strategy: selectedStrategyModel || defaultModel || 'gpt-4o', + answer: selectedAnswerModel || defaultModel || 'gpt-4o', + finalAnswer: selectedFinalModel || defaultModel || 'gpt-4o', + }) + } else { + await sendAsk(contextualQuestion, { + strategy: selectedStrategyModel, + answer: selectedAnswerModel, + finalAnswer: selectedFinalModel, + }) + } + } + + const handleDirectAsk = async () => { + if (!question.trim()) return + + // Build context from knowledge graph + const graphContext = buildGraphContext() + const contextualQuestion = `${graphContext}\n\nQuestion: ${question}` + + await sendDirectAsk(contextualQuestion, selectedAnswerModel) + } + + // Build context string from graph statistics + const buildGraphContext = () => { + const { nodeCount, edgeCount, topConcepts, nodeTypes, relationshipTypes, allNodes } = graphStats + + let context = `You are analyzing a Knowledge Graph with the following structure:\n\n` + context += `## Graph Statistics:\n` + context += `- Total Nodes: ${nodeCount}\n` + context += `- Total Connections: ${edgeCount}\n\n` + + // Node types breakdown + if (nodeTypes && Object.keys(nodeTypes).length > 0) { + context += `## Node Types Distribution:\n` + Object.entries(nodeTypes).forEach(([type, count]) => { + context += `- ${type}: ${count} nodes\n` + 
}) + context += `\n` + } + + // Top concepts with details + if (topConcepts && topConcepts.length > 0) { + context += `## Top ${Math.min(10, topConcepts.length)} Most Important Concepts:\n` + topConcepts.forEach((concept, index) => { + context += `${index + 1}. **${concept.label}**` + if (concept.type) context += ` [${concept.type}]` + context += ` - ${concept.mentions} mentions` + if (concept.importance) context += ` (importance: ${Math.round(concept.importance * 100)}%)` + context += `\n` + }) + context += `\n` + } + + // Relationship types + if (relationshipTypes && Object.keys(relationshipTypes).length > 0) { + context += `## Relationship Types:\n` + Object.entries(relationshipTypes) + .sort(([, a], [, b]) => b - a) + .slice(0, 10) + .forEach(([type, count]) => { + context += `- ${type}: ${count} connections\n` + }) + context += `\n` + } + + // All concepts (for comprehensive understanding) + if (allNodes && allNodes.length > 0 && allNodes.length <= 50) { + context += `## All Concepts in Graph:\n` + allNodes.forEach(node => { + context += `- ${node.label} [${node.type}]\n` + }) + context += `\n` + } + + context += `Please answer the following question based on this knowledge graph structure and content.\n` + + return context + } + + const suggestedQuestions = [ + "What are the main concepts in this knowledge graph?", + "How are the key ideas connected?", + "Summarize the relationships between the top concepts", + "What patterns can you identify in the graph?", + "Explain the most important nodes and their connections" + ] + + return ( + + +
+ + + Knowledge Graph Insights + + + + {graphStats.nodeCount} nodes + +
+

+ Ask AI questions about your knowledge graph +

+
+ + {/* Quick Stats */} +
+
+
{graphStats.nodeCount}
+
Nodes
+
+
+
{graphStats.edgeCount}
+
Connections
+
+
+
+ {graphStats.topConcepts?.length || 0} +
+
Top Concepts
+
+
+ + + + {/* Ask AI Section */} +
+ {/* Model Selection */} +
+ + +
+ + {/* Question Input */} +
+ +