diff --git a/.env b/.env
index 6dbec16f5dbb73099551b9151d2c8cf775ab0d07..38ce6ca72b243a87a50f37456a64b3d837bd85d5 100644
--- a/.env
+++ b/.env
@@ -34,7 +34,7 @@ COUPLE_SESSION_WITH_COOKIE_NAME=
# when OPEN_ID is configured, users are required to login after the welcome modal
OPENID_CLIENT_ID=
OPENID_CLIENT_SECRET=
-OPENID_SCOPES="openid profile inference-api"
+OPENID_SCOPES="openid profile inference-api read-mcp"
USE_USER_TOKEN=
AUTOMATIC_LOGIN=# if true authentication is required on all routes
@@ -73,10 +73,15 @@ LLM_ROUTER_MAX_ASSISTANT_LENGTH=500
LLM_ROUTER_MAX_PREV_USER_LENGTH=400
# Enable router multimodal fallback (set to true to allow image inputs via router)
-LLM_ROUTER_ENABLE_MULTIMODAL=false
+LLM_ROUTER_ENABLE_MULTIMODAL=
# Optional: specific model to use for multimodal requests. If not set, uses first multimodal model
LLM_ROUTER_MULTIMODAL_MODEL=
+# Enable router tool support (set to true to allow tool calling via router)
+LLM_ROUTER_ENABLE_TOOLS=
+# Required when tools are active: id or name of the model to use for MCP tool calls.
+LLM_ROUTER_TOOLS_MODEL=
+
# Router UI overrides (client-visible)
# Public display name for the router entry in the model list. Defaults to "Omni".
PUBLIC_LLM_ROUTER_DISPLAY_NAME=Omni
@@ -113,6 +118,11 @@ ADMIN_TOKEN=#We recommend leaving this empty, you can get the token from the ter
LLM_SUMMARIZATION=true # generate conversation titles with LLMs
ALLOW_IFRAME=true # Allow the app to be embedded in an iframe
+
+# Base servers list (JSON array). Example: MCP_SERVERS=[{"name": "Web Search (Exa)", "url": "https://mcp.exa.ai/mcp"}, {"name": "Hugging Face", "url": "https://huggingface.co/mcp"}]
+MCP_SERVERS=
+# When true, forward the logged-in user's Hugging Face access token to the HF MCP endpoint (only when no Authorization header is set on that server entry)
+MCP_FORWARD_HF_USER_TOKEN=
ENABLE_DATA_EXPORT=true
### Rate limits ###
diff --git a/.github/workflows/slugify.yaml b/.github/workflows/slugify.yaml
index 8f63c34e437d12ec4c58df79911a7a655ba9eece..3a0573a4317505b18871e13573c6ce75f1b83b20 100644
--- a/.github/workflows/slugify.yaml
+++ b/.github/workflows/slugify.yaml
@@ -4,12 +4,12 @@ on:
workflow_call:
inputs:
value:
- description: 'Value to slugify'
+ description: "Value to slugify"
required: true
type: string
outputs:
slug:
- description: 'Slugified value'
+ description: "Slugified value"
value: ${{ jobs.generate-slug.outputs.slug }}
jobs:
@@ -22,7 +22,7 @@ jobs:
- name: Setup Go
uses: actions/setup-go@v5
with:
- go-version: '1.21'
+ go-version: "1.21"
- name: Generate slug
id: slugify
@@ -30,43 +30,43 @@ jobs:
# Create working directory
mkdir -p $HOME/slugify
cd $HOME/slugify
-
+
# Create Go script
cat > main.go << 'EOF'
package main
-
+
import (
"fmt"
"os"
"github.com/gosimple/slug"
)
-
+
func main() {
if len(os.Args) < 2 {
fmt.Println("Usage: slugify ")
os.Exit(1)
}
-
+
text := os.Args[1]
slugged := slug.Make(text)
fmt.Println(slugged)
}
EOF
-
+
# Initialize module and install dependency
go mod init slugify
go mod tidy
go get github.com/gosimple/slug
-
+
# Build
go build -o slugify main.go
-
+
# Generate slug
VALUE="${{ inputs.value }}"
echo "Input value: $VALUE"
-
+
SLUG=$(./slugify "$VALUE")
echo "Generated slug: $SLUG"
-
+
# Export
- echo "slug=$SLUG" >> $GITHUB_OUTPUT
\ No newline at end of file
+ echo "slug=$SLUG" >> $GITHUB_OUTPUT
diff --git a/README.md b/README.md
index d2c95061427e180d2b0c9c40812be87b6be0b830..2e4261315f967b7cc82ff460a9966cc96a7ff54d 100644
--- a/README.md
+++ b/README.md
@@ -142,6 +142,43 @@ When you select Omni in the UI, Chat UI will:
- Emit RouterMetadata immediately (route and actual model used) so the UI can display it.
- Stream from the selected model via your configured `OPENAI_BASE_URL`. On errors, it tries route fallbacks.
+Tool and multimodal shortcuts:
+
+- Multimodal: If `LLM_ROUTER_ENABLE_MULTIMODAL=true` and the user sends an image, the router bypasses Arch and uses `LLM_ROUTER_MULTIMODAL_MODEL` (or the first multimodal model). Route name: `multimodal`.
+- Tools: If `LLM_ROUTER_ENABLE_TOOLS=true` and the user has at least one MCP server enabled, the router bypasses Arch and uses `LLM_ROUTER_TOOLS_MODEL`. If that model is missing or misconfigured, it falls back to Arch routing. Route name: `agentic`.
+
+### MCP Tools (Optional)
+
+Chat UI can call tools exposed by Model Context Protocol (MCP) servers and feed results back to the model using OpenAI function calling. You can preconfigure trusted servers via env, let users add their own, and optionally have the Omni router auto‑select a tools‑capable model.
+
+Configure servers (base list for all users):
+
+```env
+# JSON array of servers: name, url, optional headers
+MCP_SERVERS=[
+ {"name": "Web Search (Exa)", "url": "https://mcp.exa.ai/mcp"},
+ {"name": "Hugging Face MCP Login", "url": "https://huggingface.co/mcp?login"}
+]
+
+# Forward the signed-in user's Hugging Face token to the official HF MCP login endpoint
+# when no Authorization header is set on that server entry.
+MCP_FORWARD_HF_USER_TOKEN=true
+```
+
+Enable router tool path (Omni):
+
+- Set `LLM_ROUTER_ENABLE_TOOLS=true` and choose a tools‑capable target with `LLM_ROUTER_TOOLS_MODEL=`.
+- The target must support OpenAI tools/function calling. Chat UI surfaces a “tools” badge on models that advertise this; you can also force‑enable it per‑model in settings (see below).
+
+Use tools in the UI:
+
+- Open “MCP Servers” from the top‑right menu or from the `+` menu in the chat input to add servers, toggle them on, and run Health Check. The server card lists available tools.
+- When a model calls a tool, the message shows a compact “tool” block with parameters, a progress bar while running, and the result (or error). Results are also provided back to the model for follow‑up.
+
+Per‑model overrides:
+
+- In Settings → Model, you can toggle “Tool calling (functions)” and “Multimodal input” per model. These overrides apply even if the provider metadata doesn’t advertise the capability.
+
## Building
To create a production version of your app:
diff --git a/chart/env/dev.yaml b/chart/env/dev.yaml
index f3a02c79954d1d48964d0ece95dae8c3f1149042..72dc98b9fd01d1dffc39fe47d7d245b53fa0cb3b 100644
--- a/chart/env/dev.yaml
+++ b/chart/env/dev.yaml
@@ -38,8 +38,9 @@ ingressInternal:
envVars:
TEST: "test"
COUPLE_SESSION_WITH_COOKIE_NAME: "token"
- OPENID_SCOPES: "openid profile inference-api"
+ OPENID_SCOPES: "openid profile inference-api read-mcp"
USE_USER_TOKEN: "true"
+ MCP_FORWARD_HF_USER_TOKEN: "true"
AUTOMATIC_LOGIN: "false"
ADDRESS_HEADER: "X-Forwarded-For"
@@ -67,6 +68,10 @@ envVars:
LLM_ROUTER_ARCH_TIMEOUT_MS: "10000"
LLM_ROUTER_ENABLE_MULTIMODAL: "true"
LLM_ROUTER_MULTIMODAL_MODEL: "Qwen/Qwen3-VL-235B-A22B-Thinking"
+ LLM_ROUTER_ENABLE_TOOLS: "true"
+ LLM_ROUTER_TOOLS_MODEL: "moonshotai/Kimi-K2-Instruct-0905"
+ MCP_SERVERS: >
+ [{"name": "Web Search (Exa)", "url": "https://mcp.exa.ai/mcp"}, {"name": "Hugging Face", "url": "https://huggingface.co/mcp?login"}]
PUBLIC_LLM_ROUTER_DISPLAY_NAME: "Omni"
PUBLIC_LLM_ROUTER_LOGO_URL: "https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/C5V0v1xZXv6M7FXsdJH9b.png"
PUBLIC_LLM_ROUTER_ALIAS_ID: "omni"
diff --git a/chart/env/prod.yaml b/chart/env/prod.yaml
index bfcc28d0987fd25756921571520282e981a86403..d461a7efe94c55475bd769b0373badf938132bba 100644
--- a/chart/env/prod.yaml
+++ b/chart/env/prod.yaml
@@ -48,8 +48,9 @@ ingressInternal:
envVars:
COUPLE_SESSION_WITH_COOKIE_NAME: "token"
- OPENID_SCOPES: "openid profile inference-api"
+ OPENID_SCOPES: "openid profile inference-api read-mcp"
USE_USER_TOKEN: "true"
+ MCP_FORWARD_HF_USER_TOKEN: "true"
AUTOMATIC_LOGIN: "false"
ADDRESS_HEADER: "X-Forwarded-For"
@@ -76,7 +77,11 @@ envVars:
LLM_ROUTER_OTHER_ROUTE: "casual_conversation"
LLM_ROUTER_ARCH_TIMEOUT_MS: "10000"
LLM_ROUTER_ENABLE_MULTIMODAL: "true"
- LLM_ROUTER_MULTIMODAL_MODEL: "Qwen/Qwen3-VL-235B-A22B-Thinking"
+ LLM_ROUTER_MULTIMODAL_MODEL: "Qwen/Qwen3-VL-30B-A3B-Instruct"
+ LLM_ROUTER_ENABLE_TOOLS: "true"
+ LLM_ROUTER_TOOLS_MODEL: "moonshotai/Kimi-K2-Instruct-0905"
+ MCP_SERVERS: >
+ [{"name": "Web Search (Exa)", "url": "https://mcp.exa.ai/mcp"}, {"name": "Hugging Face", "url": "https://huggingface.co/mcp?login"}]
PUBLIC_LLM_ROUTER_DISPLAY_NAME: "Omni"
PUBLIC_LLM_ROUTER_LOGO_URL: "https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/C5V0v1xZXv6M7FXsdJH9b.png"
PUBLIC_LLM_ROUTER_ALIAS_ID: "omni"
diff --git a/package-lock.json b/package-lock.json
index 687c65f1a6569522ab639d95dbaaf225bacb6093..6e5e005bb00b831402158bc07c12f0ce96a7ab52 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -13,6 +13,7 @@
"@huggingface/hub": "^2.2.0",
"@huggingface/inference": "^4.11.3",
"@iconify-json/bi": "^1.1.21",
+ "@modelcontextprotocol/sdk": "^1.21.1",
"@resvg/resvg-js": "^2.6.2",
"autoprefixer": "^10.4.14",
"aws4": "^1.13.2",
@@ -396,24 +397,24 @@
}
},
"node_modules/@aws-sdk/client-cognito-identity": {
- "version": "3.925.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/client-cognito-identity/-/client-cognito-identity-3.925.0.tgz",
- "integrity": "sha512-7koO8MTU6T0dKAaFi7Bm06t4l8M9z798WSvpwzcCVItf6UAj+popz5MKzomxpd4Ire7C1jqqponiM8rrxNyYcQ==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/client-cognito-identity/-/client-cognito-identity-3.927.0.tgz",
+ "integrity": "sha512-nt6qcS94C88jV3ZVzc7nG4ew4Wrbi27UsYFB8OpvLNFSXOTWx3Sd7g7xn6FyRFBM6QH+zijqgQ6lpKIMQdm9+w==",
"license": "Apache-2.0",
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
"@aws-crypto/sha256-js": "5.2.0",
- "@aws-sdk/core": "3.922.0",
- "@aws-sdk/credential-provider-node": "3.925.0",
+ "@aws-sdk/core": "3.927.0",
+ "@aws-sdk/credential-provider-node": "3.927.0",
"@aws-sdk/middleware-host-header": "3.922.0",
"@aws-sdk/middleware-logger": "3.922.0",
"@aws-sdk/middleware-recursion-detection": "3.922.0",
- "@aws-sdk/middleware-user-agent": "3.922.0",
+ "@aws-sdk/middleware-user-agent": "3.927.0",
"@aws-sdk/region-config-resolver": "3.925.0",
"@aws-sdk/types": "3.922.0",
"@aws-sdk/util-endpoints": "3.922.0",
"@aws-sdk/util-user-agent-browser": "3.922.0",
- "@aws-sdk/util-user-agent-node": "3.922.0",
+ "@aws-sdk/util-user-agent-node": "3.927.0",
"@smithy/config-resolver": "^4.4.2",
"@smithy/core": "^3.17.2",
"@smithy/fetch-http-handler": "^5.3.5",
@@ -446,23 +447,23 @@
}
},
"node_modules/@aws-sdk/client-sso": {
- "version": "3.925.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.925.0.tgz",
- "integrity": "sha512-ixC9CyXe/mBo1X+bzOxIIzsdBYzM+klWoHUYzwnPMrXhpDrMjj8D24R/FPqrDnhoYYXiyS4BApRLpeymsFJq2Q==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.927.0.tgz",
+ "integrity": "sha512-O+e+jo6ei7U/BA7lhT4mmPCWmeR9dFgGUHVwCwJ5c/nCaSaHQ+cb7j2h8WPXERu0LhPSFyj1aD5dk3jFIwNlbg==",
"license": "Apache-2.0",
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
"@aws-crypto/sha256-js": "5.2.0",
- "@aws-sdk/core": "3.922.0",
+ "@aws-sdk/core": "3.927.0",
"@aws-sdk/middleware-host-header": "3.922.0",
"@aws-sdk/middleware-logger": "3.922.0",
"@aws-sdk/middleware-recursion-detection": "3.922.0",
- "@aws-sdk/middleware-user-agent": "3.922.0",
+ "@aws-sdk/middleware-user-agent": "3.927.0",
"@aws-sdk/region-config-resolver": "3.925.0",
"@aws-sdk/types": "3.922.0",
"@aws-sdk/util-endpoints": "3.922.0",
"@aws-sdk/util-user-agent-browser": "3.922.0",
- "@aws-sdk/util-user-agent-node": "3.922.0",
+ "@aws-sdk/util-user-agent-node": "3.927.0",
"@smithy/config-resolver": "^4.4.2",
"@smithy/core": "^3.17.2",
"@smithy/fetch-http-handler": "^5.3.5",
@@ -495,9 +496,9 @@
}
},
"node_modules/@aws-sdk/core": {
- "version": "3.922.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.922.0.tgz",
- "integrity": "sha512-EvfP4cqJfpO3L2v5vkIlTkMesPtRwWlMfsaW6Tpfm7iYfBOuTi6jx60pMDMTyJNVfh6cGmXwh/kj1jQdR+w99Q==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.927.0.tgz",
+ "integrity": "sha512-QOtR9QdjNeC7bId3fc/6MnqoEezvQ2Fk+x6F+Auf7NhOxwYAtB1nvh0k3+gJHWVGpfxN1I8keahRZd79U68/ag==",
"license": "Apache-2.0",
"dependencies": {
"@aws-sdk/types": "3.922.0",
@@ -519,12 +520,12 @@
}
},
"node_modules/@aws-sdk/credential-provider-cognito-identity": {
- "version": "3.925.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-cognito-identity/-/credential-provider-cognito-identity-3.925.0.tgz",
- "integrity": "sha512-hSA6PE/u+DYYJVJ01cyKiDR3d31kOJ1l+qJJimEiG+jH1K+EUgjhNVZKHUzEbumVvpWVHeZJ7Hs6iq4F/rS4+g==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-cognito-identity/-/credential-provider-cognito-identity-3.927.0.tgz",
+ "integrity": "sha512-zV6w71IT+7rTUiIBIdzHt0aDkYA0NckZHr97/O6qcp0qm3mIj8oiDjHo6sD8qLAVT2ixmAhuBuZ8DAkMHjZ0wA==",
"license": "Apache-2.0",
"dependencies": {
- "@aws-sdk/client-cognito-identity": "3.925.0",
+ "@aws-sdk/client-cognito-identity": "3.927.0",
"@aws-sdk/types": "3.922.0",
"@smithy/property-provider": "^4.2.4",
"@smithy/types": "^4.8.1",
@@ -535,12 +536,12 @@
}
},
"node_modules/@aws-sdk/credential-provider-env": {
- "version": "3.922.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.922.0.tgz",
- "integrity": "sha512-WikGQpKkROJSK3D3E7odPjZ8tU7WJp5/TgGdRuZw3izsHUeH48xMv6IznafpRTmvHcjAbDQj4U3CJZNAzOK/OQ==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.927.0.tgz",
+ "integrity": "sha512-bAllBpmaWINpf0brXQWh/hjkBctapknZPYb3FJRlBHytEGHi7TpgqBXi8riT0tc6RVWChhnw58rQz22acOmBuw==",
"license": "Apache-2.0",
"dependencies": {
- "@aws-sdk/core": "3.922.0",
+ "@aws-sdk/core": "3.927.0",
"@aws-sdk/types": "3.922.0",
"@smithy/property-provider": "^4.2.4",
"@smithy/types": "^4.8.1",
@@ -551,12 +552,12 @@
}
},
"node_modules/@aws-sdk/credential-provider-http": {
- "version": "3.922.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.922.0.tgz",
- "integrity": "sha512-i72DgHMK7ydAEqdzU0Duqh60Q8W59EZmRJ73y0Y5oFmNOqnYsAI+UXyOoCsubp+Dkr6+yOwAn1gPt1XGE9Aowg==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.927.0.tgz",
+ "integrity": "sha512-jEvb8C7tuRBFhe8vZY9vm9z6UQnbP85IMEt3Qiz0dxAd341Hgu0lOzMv5mSKQ5yBnTLq+t3FPKgD9tIiHLqxSQ==",
"license": "Apache-2.0",
"dependencies": {
- "@aws-sdk/core": "3.922.0",
+ "@aws-sdk/core": "3.927.0",
"@aws-sdk/types": "3.922.0",
"@smithy/fetch-http-handler": "^5.3.5",
"@smithy/node-http-handler": "^4.4.4",
@@ -572,18 +573,18 @@
}
},
"node_modules/@aws-sdk/credential-provider-ini": {
- "version": "3.925.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.925.0.tgz",
- "integrity": "sha512-TOs/UkKWwXrSPolRTChpDUQjczw6KqbbanF0EzjUm3sp/AS1ThOQCKuTTdaOBZXkCIJdvRmZjF3adccE3rAoXg==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.927.0.tgz",
+ "integrity": "sha512-WvliaKYT7bNLiryl/FsZyUwRGBo/CWtboekZWvSfloAb+0SKFXWjmxt3z+Y260aoaPm/LIzEyslDHfxqR9xCJQ==",
"license": "Apache-2.0",
"dependencies": {
- "@aws-sdk/core": "3.922.0",
- "@aws-sdk/credential-provider-env": "3.922.0",
- "@aws-sdk/credential-provider-http": "3.922.0",
- "@aws-sdk/credential-provider-process": "3.922.0",
- "@aws-sdk/credential-provider-sso": "3.925.0",
- "@aws-sdk/credential-provider-web-identity": "3.925.0",
- "@aws-sdk/nested-clients": "3.925.0",
+ "@aws-sdk/core": "3.927.0",
+ "@aws-sdk/credential-provider-env": "3.927.0",
+ "@aws-sdk/credential-provider-http": "3.927.0",
+ "@aws-sdk/credential-provider-process": "3.927.0",
+ "@aws-sdk/credential-provider-sso": "3.927.0",
+ "@aws-sdk/credential-provider-web-identity": "3.927.0",
+ "@aws-sdk/nested-clients": "3.927.0",
"@aws-sdk/types": "3.922.0",
"@smithy/credential-provider-imds": "^4.2.4",
"@smithy/property-provider": "^4.2.4",
@@ -596,17 +597,17 @@
}
},
"node_modules/@aws-sdk/credential-provider-node": {
- "version": "3.925.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.925.0.tgz",
- "integrity": "sha512-+T9mnnTY73MLkVxsk5RtzE4fv7GnMhR7iXhL/yTusf1zLfA09uxlA9VCz6tWxm5rHcO4ZN0x4hnqqDhM+DB5KQ==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.927.0.tgz",
+ "integrity": "sha512-M6BLrI+WHQ7PUY1aYu2OkI/KEz9aca+05zyycACk7cnlHlZaQ3vTFd0xOqF+A1qaenQBuxApOTs7Z21pnPUo9Q==",
"license": "Apache-2.0",
"dependencies": {
- "@aws-sdk/credential-provider-env": "3.922.0",
- "@aws-sdk/credential-provider-http": "3.922.0",
- "@aws-sdk/credential-provider-ini": "3.925.0",
- "@aws-sdk/credential-provider-process": "3.922.0",
- "@aws-sdk/credential-provider-sso": "3.925.0",
- "@aws-sdk/credential-provider-web-identity": "3.925.0",
+ "@aws-sdk/credential-provider-env": "3.927.0",
+ "@aws-sdk/credential-provider-http": "3.927.0",
+ "@aws-sdk/credential-provider-ini": "3.927.0",
+ "@aws-sdk/credential-provider-process": "3.927.0",
+ "@aws-sdk/credential-provider-sso": "3.927.0",
+ "@aws-sdk/credential-provider-web-identity": "3.927.0",
"@aws-sdk/types": "3.922.0",
"@smithy/credential-provider-imds": "^4.2.4",
"@smithy/property-provider": "^4.2.4",
@@ -619,12 +620,12 @@
}
},
"node_modules/@aws-sdk/credential-provider-process": {
- "version": "3.922.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.922.0.tgz",
- "integrity": "sha512-1DZOYezT6okslpvMW7oA2q+y17CJd4fxjNFH0jtThfswdh9CtG62+wxenqO+NExttq0UMaKisrkZiVrYQBTShw==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.927.0.tgz",
+ "integrity": "sha512-rvqdZIN3TRhLKssufN5G2EWLMBct3ZebOBdwr0tuOoPEdaYflyXYYUScu+Beb541CKfXaFnEOlZokq12r7EPcQ==",
"license": "Apache-2.0",
"dependencies": {
- "@aws-sdk/core": "3.922.0",
+ "@aws-sdk/core": "3.927.0",
"@aws-sdk/types": "3.922.0",
"@smithy/property-provider": "^4.2.4",
"@smithy/shared-ini-file-loader": "^4.3.4",
@@ -636,14 +637,14 @@
}
},
"node_modules/@aws-sdk/credential-provider-sso": {
- "version": "3.925.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.925.0.tgz",
- "integrity": "sha512-aZlUC6LRsOMDvIu0ifF62mTjL3KGzclWu5XBBN8eLDAYTdhqMxv3HyrqWoiHnGZnZGaVU+II+qsVoeBnGOwHow==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.927.0.tgz",
+ "integrity": "sha512-XrCuncze/kxZE6WYEWtNMGtrJvJtyhUqav4xQQ9PJcNjxCUYiIRv7Gwkt7cuwJ1HS+akQj+JiZmljAg97utfDw==",
"license": "Apache-2.0",
"dependencies": {
- "@aws-sdk/client-sso": "3.925.0",
- "@aws-sdk/core": "3.922.0",
- "@aws-sdk/token-providers": "3.925.0",
+ "@aws-sdk/client-sso": "3.927.0",
+ "@aws-sdk/core": "3.927.0",
+ "@aws-sdk/token-providers": "3.927.0",
"@aws-sdk/types": "3.922.0",
"@smithy/property-provider": "^4.2.4",
"@smithy/shared-ini-file-loader": "^4.3.4",
@@ -655,13 +656,13 @@
}
},
"node_modules/@aws-sdk/credential-provider-web-identity": {
- "version": "3.925.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.925.0.tgz",
- "integrity": "sha512-dR34s8Sfd1wJBzIuvRFO2FCnLmYD8iwPWrdXWI2ZypFt1EQR8jeQ20mnS+UOCoR5Z0tY6wJqEgTXKl4KuZ+DUg==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.927.0.tgz",
+ "integrity": "sha512-Oh/aFYjZQsIiZ2PQEgTNvqEE/mmOYxZKZzXV86qrU3jBUfUUBvprUZc684nBqJbSKPwM5jCZtxiRYh+IrZDE7A==",
"license": "Apache-2.0",
"dependencies": {
- "@aws-sdk/core": "3.922.0",
- "@aws-sdk/nested-clients": "3.925.0",
+ "@aws-sdk/core": "3.927.0",
+ "@aws-sdk/nested-clients": "3.927.0",
"@aws-sdk/types": "3.922.0",
"@smithy/property-provider": "^4.2.4",
"@smithy/shared-ini-file-loader": "^4.3.4",
@@ -673,22 +674,22 @@
}
},
"node_modules/@aws-sdk/credential-providers": {
- "version": "3.925.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/credential-providers/-/credential-providers-3.925.0.tgz",
- "integrity": "sha512-CTTFn+8NiXRoyKbaTKXCSZ9pUs3R3HllTgl2In8Mxl60Eim9QrP3QYbSjH+pqaIOf1qhbe1UuEICzGrO3Y+8MA==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/credential-providers/-/credential-providers-3.927.0.tgz",
+ "integrity": "sha512-CasoHKKE/K+6YcVqjE+v5dVyKqKBtfzZyvGi669HvJ1f4EPHbVRPPLIb0eAYd/aEmwHsB/nn9VnyN9Wq5OppUQ==",
"license": "Apache-2.0",
"dependencies": {
- "@aws-sdk/client-cognito-identity": "3.925.0",
- "@aws-sdk/core": "3.922.0",
- "@aws-sdk/credential-provider-cognito-identity": "3.925.0",
- "@aws-sdk/credential-provider-env": "3.922.0",
- "@aws-sdk/credential-provider-http": "3.922.0",
- "@aws-sdk/credential-provider-ini": "3.925.0",
- "@aws-sdk/credential-provider-node": "3.925.0",
- "@aws-sdk/credential-provider-process": "3.922.0",
- "@aws-sdk/credential-provider-sso": "3.925.0",
- "@aws-sdk/credential-provider-web-identity": "3.925.0",
- "@aws-sdk/nested-clients": "3.925.0",
+ "@aws-sdk/client-cognito-identity": "3.927.0",
+ "@aws-sdk/core": "3.927.0",
+ "@aws-sdk/credential-provider-cognito-identity": "3.927.0",
+ "@aws-sdk/credential-provider-env": "3.927.0",
+ "@aws-sdk/credential-provider-http": "3.927.0",
+ "@aws-sdk/credential-provider-ini": "3.927.0",
+ "@aws-sdk/credential-provider-node": "3.927.0",
+ "@aws-sdk/credential-provider-process": "3.927.0",
+ "@aws-sdk/credential-provider-sso": "3.927.0",
+ "@aws-sdk/credential-provider-web-identity": "3.927.0",
+ "@aws-sdk/nested-clients": "3.927.0",
"@aws-sdk/types": "3.922.0",
"@smithy/config-resolver": "^4.4.2",
"@smithy/core": "^3.17.2",
@@ -748,12 +749,12 @@
}
},
"node_modules/@aws-sdk/middleware-user-agent": {
- "version": "3.922.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.922.0.tgz",
- "integrity": "sha512-N4Qx/9KP3oVQBJOrSghhz8iZFtUC2NNeSZt88hpPhbqAEAtuX8aD8OzVcpnAtrwWqy82Yd2YTxlkqMGkgqnBsQ==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.927.0.tgz",
+ "integrity": "sha512-sv6St9EgEka6E7y19UMCsttFBZ8tsmz2sstgRd7LztlX3wJynpeDUhq0gtedguG1lGZY/gDf832k5dqlRLUk7g==",
"license": "Apache-2.0",
"dependencies": {
- "@aws-sdk/core": "3.922.0",
+ "@aws-sdk/core": "3.927.0",
"@aws-sdk/types": "3.922.0",
"@aws-sdk/util-endpoints": "3.922.0",
"@smithy/core": "^3.17.2",
@@ -766,23 +767,23 @@
}
},
"node_modules/@aws-sdk/nested-clients": {
- "version": "3.925.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.925.0.tgz",
- "integrity": "sha512-Fc8QhH+1YzGQb5aWQUX6gRnKSzUZ9p3p/muqXIgYBL8RSd5O6hSPhDTyrOWE247zFlOjVlAlEnoTMJKarH0cIA==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.927.0.tgz",
+ "integrity": "sha512-Oy6w7+fzIdr10DhF/HpfVLy6raZFTdiE7pxS1rvpuj2JgxzW2y6urm2sYf3eLOpMiHyuG4xUBwFiJpU9CCEvJA==",
"license": "Apache-2.0",
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
"@aws-crypto/sha256-js": "5.2.0",
- "@aws-sdk/core": "3.922.0",
+ "@aws-sdk/core": "3.927.0",
"@aws-sdk/middleware-host-header": "3.922.0",
"@aws-sdk/middleware-logger": "3.922.0",
"@aws-sdk/middleware-recursion-detection": "3.922.0",
- "@aws-sdk/middleware-user-agent": "3.922.0",
+ "@aws-sdk/middleware-user-agent": "3.927.0",
"@aws-sdk/region-config-resolver": "3.925.0",
"@aws-sdk/types": "3.922.0",
"@aws-sdk/util-endpoints": "3.922.0",
"@aws-sdk/util-user-agent-browser": "3.922.0",
- "@aws-sdk/util-user-agent-node": "3.922.0",
+ "@aws-sdk/util-user-agent-node": "3.927.0",
"@smithy/config-resolver": "^4.4.2",
"@smithy/core": "^3.17.2",
"@smithy/fetch-http-handler": "^5.3.5",
@@ -831,13 +832,13 @@
}
},
"node_modules/@aws-sdk/token-providers": {
- "version": "3.925.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.925.0.tgz",
- "integrity": "sha512-F4Oibka1W5YYDeL+rGt/Hg3NLjOzrJdmuZOE0OFQt/U6dnJwYmYi2gFqduvZnZcD1agNm37mh7/GUq1zvKS6ig==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.927.0.tgz",
+ "integrity": "sha512-JRdaprkZjZ6EY4WVwsZaEjPUj9W9vqlSaFDm4oD+IbwlY4GjAXuUQK6skKcvVyoOsSTvJp/CaveSws2FiWUp9Q==",
"license": "Apache-2.0",
"dependencies": {
- "@aws-sdk/core": "3.922.0",
- "@aws-sdk/nested-clients": "3.925.0",
+ "@aws-sdk/core": "3.927.0",
+ "@aws-sdk/nested-clients": "3.927.0",
"@aws-sdk/types": "3.922.0",
"@smithy/property-provider": "^4.2.4",
"@smithy/shared-ini-file-loader": "^4.3.4",
@@ -902,12 +903,12 @@
}
},
"node_modules/@aws-sdk/util-user-agent-node": {
- "version": "3.922.0",
- "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.922.0.tgz",
- "integrity": "sha512-NrPe/Rsr5kcGunkog0eBV+bY0inkRELsD2SacC4lQZvZiXf8VJ2Y7j+Yq1tB+h+FPLsdt3v9wItIvDf/laAm0Q==",
+ "version": "3.927.0",
+ "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.927.0.tgz",
+ "integrity": "sha512-5Ty+29jBTHg1mathEhLJavzA7A7vmhephRYGenFzo8rApLZh+c+MCAqjddSjdDzcf5FH+ydGGnIrj4iIfbZIMQ==",
"license": "Apache-2.0",
"dependencies": {
- "@aws-sdk/middleware-user-agent": "3.922.0",
+ "@aws-sdk/middleware-user-agent": "3.927.0",
"@aws-sdk/types": "3.922.0",
"@smithy/node-config-provider": "^4.3.4",
"@smithy/types": "^4.8.1",
@@ -2421,6 +2422,60 @@
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
+ "node_modules/@modelcontextprotocol/sdk": {
+ "version": "1.21.1",
+ "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.21.1.tgz",
+ "integrity": "sha512-UyLFcJLDvUuZbGnaQqXFT32CpPpGj7VS19roLut6gkQVhb439xUzYWbsUvdI3ZPL+2hnFosuugtYWE0Mcs1rmQ==",
+ "license": "MIT",
+ "dependencies": {
+ "ajv": "^8.17.1",
+ "ajv-formats": "^3.0.1",
+ "content-type": "^1.0.5",
+ "cors": "^2.8.5",
+ "cross-spawn": "^7.0.5",
+ "eventsource": "^3.0.2",
+ "eventsource-parser": "^3.0.0",
+ "express": "^5.0.1",
+ "express-rate-limit": "^7.5.0",
+ "pkce-challenge": "^5.0.0",
+ "raw-body": "^3.0.0",
+ "zod": "^3.23.8",
+ "zod-to-json-schema": "^3.24.1"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "@cfworker/json-schema": "^4.1.1"
+ },
+ "peerDependenciesMeta": {
+ "@cfworker/json-schema": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@modelcontextprotocol/sdk/node_modules/ajv": {
+ "version": "8.17.1",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
+ "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
+ "license": "MIT",
+ "dependencies": {
+ "fast-deep-equal": "^3.1.3",
+ "fast-uri": "^3.0.1",
+ "json-schema-traverse": "^1.0.0",
+ "require-from-string": "^2.0.2"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/epoberezkin"
+ }
+ },
+ "node_modules/@modelcontextprotocol/sdk/node_modules/json-schema-traverse": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
+ "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
+ "license": "MIT"
+ },
"node_modules/@mongodb-js/saslprep": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.2.2.tgz",
@@ -4528,6 +4583,40 @@
"node": ">=6.5"
}
},
+ "node_modules/accepts": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz",
+ "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==",
+ "license": "MIT",
+ "dependencies": {
+ "mime-types": "^3.0.0",
+ "negotiator": "^1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/accepts/node_modules/mime-db": {
+ "version": "1.54.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz",
+ "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/accepts/node_modules/mime-types": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz",
+ "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==",
+ "license": "MIT",
+ "dependencies": {
+ "mime-db": "^1.54.0"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
"node_modules/acorn": {
"version": "8.14.1",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz",
@@ -4589,6 +4678,45 @@
"url": "https://github.com/sponsors/epoberezkin"
}
},
+ "node_modules/ajv-formats": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz",
+ "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==",
+ "license": "MIT",
+ "dependencies": {
+ "ajv": "^8.0.0"
+ },
+ "peerDependencies": {
+ "ajv": "^8.0.0"
+ },
+ "peerDependenciesMeta": {
+ "ajv": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/ajv-formats/node_modules/ajv": {
+ "version": "8.17.1",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
+ "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
+ "license": "MIT",
+ "dependencies": {
+ "fast-deep-equal": "^3.1.3",
+ "fast-uri": "^3.0.1",
+ "json-schema-traverse": "^1.0.0",
+ "require-from-string": "^2.0.2"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/epoberezkin"
+ }
+ },
+ "node_modules/ajv-formats/node_modules/json-schema-traverse": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
+ "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
+ "license": "MIT"
+ },
"node_modules/ansi-escapes": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.0.0.tgz",
@@ -4868,6 +4996,26 @@
"svelte": "^5.33.0"
}
},
+ "node_modules/body-parser": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz",
+ "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==",
+ "license": "MIT",
+ "dependencies": {
+ "bytes": "^3.1.2",
+ "content-type": "^1.0.5",
+ "debug": "^4.4.0",
+ "http-errors": "^2.0.0",
+ "iconv-lite": "^0.6.3",
+ "on-finished": "^2.4.1",
+ "qs": "^6.14.0",
+ "raw-body": "^3.0.0",
+ "type-is": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
"node_modules/bowser": {
"version": "2.12.1",
"resolved": "https://registry.npmjs.org/bowser/-/bowser-2.12.1.tgz",
@@ -4986,6 +5134,15 @@
"node": "*"
}
},
+ "node_modules/bytes": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
+ "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
"node_modules/cac": {
"version": "6.7.14",
"resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz",
@@ -5008,6 +5165,22 @@
"node": ">= 0.4"
}
},
+ "node_modules/call-bound": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz",
+ "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
+ "license": "MIT",
+ "dependencies": {
+ "call-bind-apply-helpers": "^1.0.2",
+ "get-intrinsic": "^1.3.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/callsites": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@@ -5255,6 +5428,27 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/content-disposition": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz",
+ "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==",
+ "license": "MIT",
+ "dependencies": {
+ "safe-buffer": "5.2.1"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/content-type": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
+ "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
"node_modules/cookie": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
@@ -5265,6 +5459,15 @@
"node": ">= 0.6"
}
},
+ "node_modules/cookie-signature": {
+ "version": "1.2.2",
+ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz",
+ "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.6.0"
+ }
+ },
"node_modules/copy-anything": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/copy-anything/-/copy-anything-3.0.5.tgz",
@@ -5281,6 +5484,19 @@
"url": "https://github.com/sponsors/mesqueeb"
}
},
+ "node_modules/cors": {
+ "version": "2.8.5",
+ "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz",
+ "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==",
+ "license": "MIT",
+ "dependencies": {
+ "object-assign": "^4",
+ "vary": "^1"
+ },
+ "engines": {
+ "node": ">= 0.10"
+ }
+ },
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@@ -5448,6 +5664,15 @@
"node": ">=0.4.0"
}
},
+ "node_modules/depd": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
+ "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
"node_modules/dequal": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz",
@@ -5573,6 +5798,12 @@
"integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
"license": "MIT"
},
+ "node_modules/ee-first": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
+ "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==",
+ "license": "MIT"
+ },
"node_modules/electron-to-chromium": {
"version": "1.5.165",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.165.tgz",
@@ -5616,6 +5847,15 @@
"integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==",
"license": "MIT"
},
+ "node_modules/encodeurl": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz",
+ "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
"node_modules/end-of-stream": {
"version": "1.4.4",
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
@@ -6043,6 +6283,15 @@
"node": ">=0.10.0"
}
},
+ "node_modules/etag": {
+ "version": "1.8.1",
+ "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
+ "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
"node_modules/event-target-shim": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
@@ -6067,6 +6316,27 @@
"node": ">=0.8.x"
}
},
+ "node_modules/eventsource": {
+ "version": "3.0.7",
+ "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz",
+ "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==",
+ "license": "MIT",
+ "dependencies": {
+ "eventsource-parser": "^3.0.1"
+ },
+ "engines": {
+ "node": ">=18.0.0"
+ }
+ },
+ "node_modules/eventsource-parser": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.6.tgz",
+ "integrity": "sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=18.0.0"
+ }
+ },
"node_modules/exact-mirror": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/exact-mirror/-/exact-mirror-0.1.2.tgz",
@@ -6125,6 +6395,93 @@
"node": ">=12.0.0"
}
},
+ "node_modules/express": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz",
+ "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==",
+ "license": "MIT",
+ "dependencies": {
+ "accepts": "^2.0.0",
+ "body-parser": "^2.2.0",
+ "content-disposition": "^1.0.0",
+ "content-type": "^1.0.5",
+ "cookie": "^0.7.1",
+ "cookie-signature": "^1.2.1",
+ "debug": "^4.4.0",
+ "encodeurl": "^2.0.0",
+ "escape-html": "^1.0.3",
+ "etag": "^1.8.1",
+ "finalhandler": "^2.1.0",
+ "fresh": "^2.0.0",
+ "http-errors": "^2.0.0",
+ "merge-descriptors": "^2.0.0",
+ "mime-types": "^3.0.0",
+ "on-finished": "^2.4.1",
+ "once": "^1.4.0",
+ "parseurl": "^1.3.3",
+ "proxy-addr": "^2.0.7",
+ "qs": "^6.14.0",
+ "range-parser": "^1.2.1",
+ "router": "^2.2.0",
+ "send": "^1.1.0",
+ "serve-static": "^2.2.0",
+ "statuses": "^2.0.1",
+ "type-is": "^2.0.1",
+ "vary": "^1.1.2"
+ },
+ "engines": {
+ "node": ">= 18"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/express"
+ }
+ },
+ "node_modules/express-rate-limit": {
+ "version": "7.5.1",
+ "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.1.tgz",
+ "integrity": "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 16"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/express-rate-limit"
+ },
+ "peerDependencies": {
+ "express": ">= 4.11"
+ }
+ },
+ "node_modules/express/node_modules/cookie": {
+ "version": "0.7.2",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz",
+ "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/express/node_modules/mime-db": {
+ "version": "1.54.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz",
+ "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/express/node_modules/mime-types": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz",
+ "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==",
+ "license": "MIT",
+ "dependencies": {
+ "mime-db": "^1.54.0"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
"node_modules/exsolve": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/exsolve/-/exsolve-1.0.5.tgz",
@@ -6148,7 +6505,6 @@
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
- "dev": true,
"license": "MIT"
},
"node_modules/fast-fifo": {
@@ -6215,6 +6571,22 @@
"integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==",
"license": "MIT"
},
+ "node_modules/fast-uri": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz",
+ "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/fastify"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/fastify"
+ }
+ ],
+ "license": "BSD-3-Clause"
+ },
"node_modules/fast-xml-parser": {
"version": "5.2.5",
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.2.5.tgz",
@@ -6305,6 +6677,23 @@
"node": ">=8"
}
},
+ "node_modules/finalhandler": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz",
+ "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==",
+ "license": "MIT",
+ "dependencies": {
+ "debug": "^4.4.0",
+ "encodeurl": "^2.0.0",
+ "escape-html": "^1.0.3",
+ "on-finished": "^2.4.1",
+ "parseurl": "^1.3.3",
+ "statuses": "^2.0.1"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
"node_modules/find-cache-dir": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz",
@@ -6434,6 +6823,15 @@
"node": ">= 12.20"
}
},
+ "node_modules/forwarded": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
+ "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
"node_modules/fraction.js": {
"version": "4.3.7",
"resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz",
@@ -6447,6 +6845,15 @@
"url": "https://github.com/sponsors/rawify"
}
},
+ "node_modules/fresh": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz",
+ "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
"node_modules/fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
@@ -6766,6 +7173,31 @@
"node": ">=12"
}
},
+ "node_modules/http-errors": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz",
+ "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==",
+ "license": "MIT",
+ "dependencies": {
+ "depd": "2.0.0",
+ "inherits": "2.0.4",
+ "setprototypeof": "1.2.0",
+ "statuses": "2.0.1",
+ "toidentifier": "1.0.1"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/http-errors/node_modules/statuses": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
+ "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
"node_modules/http-proxy-agent": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz",
@@ -6924,7 +7356,6 @@
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
- "dev": true,
"license": "ISC"
},
"node_modules/inline-style-parser": {
@@ -6952,6 +7383,15 @@
"node": ">= 12"
}
},
+ "node_modules/ipaddr.js": {
+ "version": "1.9.1",
+ "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
+ "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.10"
+ }
+ },
"node_modules/is-arrayish": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz",
@@ -7050,6 +7490,12 @@
"integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==",
"license": "MIT"
},
+ "node_modules/is-promise": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz",
+ "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==",
+ "license": "MIT"
+ },
"node_modules/is-reference": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz",
@@ -7854,6 +8300,15 @@
"node": ">= 0.4"
}
},
+ "node_modules/media-typer": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz",
+ "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
"node_modules/memory-pager": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz",
@@ -7861,6 +8316,18 @@
"devOptional": true,
"license": "MIT"
},
+ "node_modules/merge-descriptors": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz",
+ "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
"node_modules/merge-stream": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
@@ -8281,6 +8748,15 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/negotiator": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
+ "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
"node_modules/neo-async": {
"version": "2.6.2",
"resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz",
@@ -8443,6 +8919,18 @@
"node": ">= 6"
}
},
+ "node_modules/object-inspect": {
+ "version": "1.13.4",
+ "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz",
+ "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/object-stream": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/object-stream/-/object-stream-0.0.1.tgz",
@@ -8469,6 +8957,18 @@
"node": ">=14.0.0"
}
},
+ "node_modules/on-finished": {
+ "version": "2.4.1",
+ "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz",
+ "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==",
+ "license": "MIT",
+ "dependencies": {
+ "ee-first": "1.1.1"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
@@ -8712,6 +9212,15 @@
"url": "https://github.com/inikulin/parse5?sponsor=1"
}
},
+ "node_modules/parseurl": {
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
+ "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
"node_modules/path-exists": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
@@ -8769,6 +9278,16 @@
"integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
"license": "ISC"
},
+ "node_modules/path-to-regexp": {
+ "version": "8.3.0",
+ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.3.0.tgz",
+ "integrity": "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==",
+ "license": "MIT",
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/express"
+ }
+ },
"node_modules/path-type": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
@@ -8924,6 +9443,15 @@
"node": ">= 6"
}
},
+ "node_modules/pkce-challenge": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz",
+ "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=16.20.0"
+ }
+ },
"node_modules/pkg-dir": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz",
@@ -9460,6 +9988,19 @@
"node": "^16 || ^18 || >=20"
}
},
+ "node_modules/proxy-addr": {
+ "version": "2.0.7",
+ "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
+ "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==",
+ "license": "MIT",
+ "dependencies": {
+ "forwarded": "0.2.0",
+ "ipaddr.js": "1.9.1"
+ },
+ "engines": {
+ "node": ">= 0.10"
+ }
+ },
"node_modules/psl": {
"version": "1.15.0",
"resolved": "https://registry.npmjs.org/psl/-/psl-1.15.0.tgz",
@@ -9502,6 +10043,21 @@
"teleport": ">=0.2.0"
}
},
+ "node_modules/qs": {
+ "version": "6.14.0",
+ "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz",
+ "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "side-channel": "^1.1.0"
+ },
+ "engines": {
+ "node": ">=0.6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/quansync": {
"version": "0.2.10",
"resolved": "https://registry.npmjs.org/quansync/-/quansync-0.2.10.tgz",
@@ -9551,6 +10107,46 @@
"integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==",
"license": "MIT"
},
+ "node_modules/range-parser": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz",
+ "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/raw-body": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.1.tgz",
+ "integrity": "sha512-9G8cA+tuMS75+6G/TzW8OtLzmBDMo8p1JRxN5AZ+LAp8uxGA8V8GZm4GQ4/N5QNQEnLmg6SS7wyuSmbKepiKqA==",
+ "license": "MIT",
+ "dependencies": {
+ "bytes": "3.1.2",
+ "http-errors": "2.0.0",
+ "iconv-lite": "0.7.0",
+ "unpipe": "1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.10"
+ }
+ },
+ "node_modules/raw-body/node_modules/iconv-lite": {
+ "version": "0.7.0",
+ "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.0.tgz",
+ "integrity": "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==",
+ "license": "MIT",
+ "dependencies": {
+ "safer-buffer": ">= 2.1.2 < 3.0.0"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/express"
+ }
+ },
"node_modules/react-is": {
"version": "17.0.2",
"resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz",
@@ -9606,6 +10202,15 @@
"node": ">= 12.13.0"
}
},
+ "node_modules/require-from-string": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
+ "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
"node_modules/requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
@@ -9751,6 +10356,22 @@
"integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==",
"license": "MIT"
},
+ "node_modules/router": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz",
+ "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==",
+ "license": "MIT",
+ "dependencies": {
+ "debug": "^4.4.0",
+ "depd": "^2.0.0",
+ "is-promise": "^4.0.0",
+ "parseurl": "^1.3.3",
+ "path-to-regexp": "^8.0.0"
+ },
+ "engines": {
+ "node": ">= 18"
+ }
+ },
"node_modules/rrweb-cssom": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.6.0.tgz",
@@ -9912,6 +10533,64 @@
"node": ">=10"
}
},
+ "node_modules/send": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz",
+ "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==",
+ "license": "MIT",
+ "dependencies": {
+ "debug": "^4.3.5",
+ "encodeurl": "^2.0.0",
+ "escape-html": "^1.0.3",
+ "etag": "^1.8.1",
+ "fresh": "^2.0.0",
+ "http-errors": "^2.0.0",
+ "mime-types": "^3.0.1",
+ "ms": "^2.1.3",
+ "on-finished": "^2.4.1",
+ "range-parser": "^1.2.1",
+ "statuses": "^2.0.1"
+ },
+ "engines": {
+ "node": ">= 18"
+ }
+ },
+ "node_modules/send/node_modules/mime-db": {
+ "version": "1.54.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz",
+ "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/send/node_modules/mime-types": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz",
+ "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==",
+ "license": "MIT",
+ "dependencies": {
+ "mime-db": "^1.54.0"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/serve-static": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz",
+ "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==",
+ "license": "MIT",
+ "dependencies": {
+ "encodeurl": "^2.0.0",
+ "escape-html": "^1.0.3",
+ "parseurl": "^1.3.3",
+ "send": "^1.2.0"
+ },
+ "engines": {
+ "node": ">= 18"
+ }
+ },
"node_modules/set-cookie-parser": {
"version": "2.7.1",
"resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz",
@@ -9919,6 +10598,12 @@
"devOptional": true,
"license": "MIT"
},
+ "node_modules/setprototypeof": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
+ "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==",
+ "license": "ISC"
+ },
"node_modules/sharp": {
"version": "0.33.5",
"resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz",
@@ -9979,6 +10664,78 @@
"node": ">=8"
}
},
+ "node_modules/side-channel": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
+ "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==",
+ "license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0",
+ "object-inspect": "^1.13.3",
+ "side-channel-list": "^1.0.0",
+ "side-channel-map": "^1.0.1",
+ "side-channel-weakmap": "^1.0.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/side-channel-list": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz",
+ "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==",
+ "license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0",
+ "object-inspect": "^1.13.3"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/side-channel-map": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz",
+ "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==",
+ "license": "MIT",
+ "dependencies": {
+ "call-bound": "^1.0.2",
+ "es-errors": "^1.3.0",
+ "get-intrinsic": "^1.2.5",
+ "object-inspect": "^1.13.3"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/side-channel-weakmap": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz",
+ "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==",
+ "license": "MIT",
+ "dependencies": {
+ "call-bound": "^1.0.2",
+ "es-errors": "^1.3.0",
+ "get-intrinsic": "^1.2.5",
+ "object-inspect": "^1.13.3",
+ "side-channel-map": "^1.0.1"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/siginfo": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz",
@@ -10146,6 +10903,15 @@
"integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==",
"license": "MIT"
},
+ "node_modules/statuses": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
+ "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
"node_modules/std-env": {
"version": "3.9.0",
"resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz",
@@ -10911,6 +11677,15 @@
"node": ">=8.0"
}
},
+ "node_modules/toidentifier": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
+ "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.6"
+ }
+ },
"node_modules/token-types": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/token-types/-/token-types-6.0.0.tgz",
@@ -11015,6 +11790,41 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/type-is": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz",
+ "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==",
+ "license": "MIT",
+ "dependencies": {
+ "content-type": "^1.0.5",
+ "media-typer": "^1.1.0",
+ "mime-types": "^3.0.0"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/type-is/node_modules/mime-db": {
+ "version": "1.54.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz",
+ "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/type-is/node_modules/mime-types": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz",
+ "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==",
+ "license": "MIT",
+ "dependencies": {
+ "mime-db": "^1.54.0"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
"node_modules/typescript": {
"version": "5.8.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz",
@@ -11100,6 +11910,15 @@
"node": ">= 4.0.0"
}
},
+ "node_modules/unpipe": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
+ "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
"node_modules/unplugin": {
"version": "1.16.1",
"resolved": "https://registry.npmjs.org/unplugin/-/unplugin-1.16.1.tgz",
@@ -11232,6 +12051,15 @@
"integrity": "sha512-lKxKYG6H03yCZUpAGOPOsMcGxd1RHCu1iKvEHYDPmTyq2HueGhD73ssNBqqQWfvYs04G9iUFRvmAVLW20Jw6ow==",
"license": "MIT"
},
+ "node_modules/vary": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
+ "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
"node_modules/vite": {
"version": "6.3.5",
"resolved": "https://registry.npmjs.org/vite/-/vite-6.3.5.tgz",
@@ -11819,6 +12647,15 @@
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}
+ },
+ "node_modules/zod-to-json-schema": {
+ "version": "3.24.6",
+ "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz",
+ "integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==",
+ "license": "ISC",
+ "peerDependencies": {
+ "zod": "^3.24.1"
+ }
}
}
}
diff --git a/package.json b/package.json
index 20aed91ac95adf3c938f5ad2dec420d258888b0c..7b09fe536576fd0990cd9b83a4b09e193d249bc6 100644
--- a/package.json
+++ b/package.json
@@ -70,6 +70,7 @@
"@elysiajs/swagger": "^1.3.0",
"@huggingface/hub": "^2.2.0",
"@huggingface/inference": "^4.11.3",
+ "@modelcontextprotocol/sdk": "^1.21.1",
"@iconify-json/bi": "^1.1.21",
"@resvg/resvg-js": "^2.6.2",
"autoprefixer": "^10.4.14",
diff --git a/src/hooks.server.ts b/src/hooks.server.ts
index 02ca0d9bb185a688064d3d823e88f44cf99234a6..02aa62b31658469945abf713aeba6a81026e8c1b 100644
--- a/src/hooks.server.ts
+++ b/src/hooks.server.ts
@@ -19,6 +19,7 @@ import { refreshConversationStats } from "$lib/jobs/refresh-conversation-stats";
import { adminTokenManager } from "$lib/server/adminToken";
import { isHostLocalhost } from "$lib/server/isURLLocal";
import { MetricsServer } from "$lib/server/metrics";
+import { loadMcpServersOnStartup } from "$lib/server/mcp/registry";
export const init: ServerInit = async () => {
// Wait for config to be fully loaded
@@ -49,6 +50,9 @@ export const init: ServerInit = async () => {
checkAndRunMigrations();
refreshConversationStats();
+ // Load MCP servers at startup
+ loadMcpServersOnStartup();
+
// Init AbortedGenerations refresh process
AbortedGenerations.getInstance();
diff --git a/src/lib/components/NavMenu.svelte b/src/lib/components/NavMenu.svelte
index da5fbf1284890b3710c0c5b313e6446b81e333a8..dbf56eb7ea7d07be624625278d2d721f1bf3b9ab 100644
--- a/src/lib/components/NavMenu.svelte
+++ b/src/lib/components/NavMenu.svelte
@@ -28,6 +28,8 @@
import { usePublicConfig } from "$lib/utils/PublicConfig.svelte";
import { useAPIClient, handleResponse } from "$lib/APIClient";
import { requireAuthUser } from "$lib/utils/auth";
+ import { enabledServersCount } from "$lib/stores/mcpServers";
+ import MCPServerManager from "./mcp/MCPServerManager.svelte";
const publicConfig = usePublicConfig();
const client = useAPIClient();
@@ -112,6 +114,7 @@
let isDark = $state(false);
let unsubscribeTheme: (() => void) | undefined;
+ let showMcpModal = $state(false);
if (browser) {
unsubscribeTheme = subscribeToTheme(({ isDark: nextIsDark }) => {
@@ -194,6 +197,22 @@
>
+ {#if user?.username || user?.email}
+ (showMcpModal = true)}
+ class="flex h-9 flex-none items-center gap-1.5 rounded-lg pl-2.5 pr-2 text-gray-500 hover:bg-gray-100 dark:text-gray-400 dark:hover:bg-gray-700"
+ >
+ MCP Servers
+ {#if $enabledServersCount > 0}
+
+ {$enabledServersCount}
+
+ {/if}
+
+ {/if}
+
+
+{#if showMcpModal}
+ (showMcpModal = false)} />
+{/if}
diff --git a/src/lib/components/Switch.svelte b/src/lib/components/Switch.svelte
index 4bc094edc51700d6c6c1e323ef6e030c8896cd6a..64db16bb28fc355361ef94a13c9fb746e4820e10 100644
--- a/src/lib/components/Switch.svelte
+++ b/src/lib/components/Switch.svelte
@@ -27,7 +27,7 @@
tabindex="0"
onclick={toggle}
onkeydown={onKeydown}
- class="relative inline-flex h-5 w-9 shrink-0 cursor-pointer items-center rounded-full bg-gray-300 p-1 shadow-inner ring-gray-400 peer-checked:bg-black hover:bg-gray-400 focus-visible:ring focus-visible:ring-offset-1 dark:bg-gray-600 dark:ring-gray-700 dark:peer-checked:bg-blue-600 dark:hover:bg-gray-500 peer-checked:[&>div]:translate-x-3.5"
+ class="relative inline-flex h-5 w-9 shrink-0 cursor-pointer items-center rounded-full bg-gray-300 p-1 shadow-inner ring-gray-400 peer-checked:bg-blue-600 hover:bg-gray-400 peer-checked:hover:bg-blue-600 focus-visible:ring focus-visible:ring-offset-1 dark:bg-gray-600 dark:ring-gray-700 dark:hover:bg-gray-500 dark:peer-checked:hover:bg-blue-600 peer-checked:[&>div]:translate-x-3.5"
>
diff --git a/src/lib/components/chat/ChatInput.svelte b/src/lib/components/chat/ChatInput.svelte
index 66d30e35c391804e3689056bbf1fa35f7c00683a..b90d4d8b3d0a11f133cf273a1864a0b36de6e8c6 100644
--- a/src/lib/components/chat/ChatInput.svelte
+++ b/src/lib/components/chat/ChatInput.svelte
@@ -10,11 +10,21 @@
import CarbonUpload from "~icons/carbon/upload";
import CarbonLink from "~icons/carbon/link";
import CarbonChevronRight from "~icons/carbon/chevron-right";
+ import CarbonClose from "~icons/carbon/close";
import UrlFetchModal from "./UrlFetchModal.svelte";
import { TEXT_MIME_ALLOWLIST, IMAGE_MIME_ALLOWLIST_DEFAULT } from "$lib/constants/mime";
+ import MCPServerManager from "$lib/components/mcp/MCPServerManager.svelte";
+ import IconMCP from "$lib/components/icons/IconMCP.svelte";
import { isVirtualKeyboard } from "$lib/utils/isVirtualKeyboard";
import { requireAuthUser } from "$lib/utils/auth";
+ import {
+ enabledServersCount,
+ selectedServerIds,
+ allMcpServers,
+ toggleServer,
+ } from "$lib/stores/mcpServers";
+ import { getMcpServerFaviconUrl } from "$lib/utils/favicon";
interface Props {
files?: File[];
@@ -25,6 +35,8 @@
disabled?: boolean;
// tools removed
modelIsMultimodal?: boolean;
+ // Whether the currently selected model supports tool calling (incl. overrides)
+ modelSupportsTools?: boolean;
children?: import("svelte").Snippet;
onPaste?: (e: ClipboardEvent) => void;
focused?: boolean;
@@ -40,6 +52,7 @@
disabled = false,
modelIsMultimodal = false,
+ modelSupportsTools = true,
children,
onPaste,
focused = $bindable(false),
@@ -62,6 +75,7 @@
let fileInputEl: HTMLInputElement | undefined = $state();
let isUrlModalOpen = $state(false);
+ let isMcpManagerOpen = $state(false);
function openPickerWithAccept(accept: string) {
if (!fileInputEl) return;
@@ -243,10 +257,13 @@
e.preventDefault()}
+ interactOutsideBehavior="defer-otherwise-close"
>
{#if modelIsMultimodal}
e.preventDefault()}
+ interactOutsideBehavior="defer-otherwise-close"
>
+
+
+
+
+
+
+ MCP Servers
+
+
+
+
+
+ e.preventDefault()}
+ interactOutsideBehavior="defer-otherwise-close"
+ >
+ {#each $allMcpServers as server (server.id)}
+ toggleServer(server.id)}
+ closeOnSelect={false}
+ class="flex h-9 select-none items-center gap-2 rounded-md px-2 text-sm leading-none text-gray-800 data-[highlighted]:bg-gray-100 focus-visible:outline-none dark:text-gray-100 dark:data-[highlighted]:bg-white/10"
+ >
+ {#snippet children({ checked })}
+
+ {server.name}
+
+
+
+
+
+
+ {/snippet}
+
+ {/each}
+
+ {#if $allMcpServers.length > 0}
+
+ {/if}
+ (isMcpManagerOpen = true)}
+ >
+ Manage MCP Servers
+
+
+
+
+ {#if $enabledServersCount > 0}
+
+ (isMcpManagerOpen = true)}
+ class:line-through={!modelSupportsTools}
+ >
+ MCP ({$enabledServersCount})
+
+ selectedServerIds.set(new Set())}
+ type="button"
+ >
+
+
+
+ {/if}
{/if}
@@ -304,6 +420,10 @@
acceptMimeTypes={mimeTypes}
onfiles={handleFetchedFiles}
/>
+
+ {#if isMcpManagerOpen}
+ (isMcpManagerOpen = false)} />
+ {/if}
diff --git a/src/lib/components/icons/IconMCP.svelte b/src/lib/components/icons/IconMCP.svelte
new file mode 100644
index 0000000000000000000000000000000000000000..5707192ece10211f2f65ba7ab5f0a2b2b0b231ee
--- /dev/null
+++ b/src/lib/components/icons/IconMCP.svelte
@@ -0,0 +1,28 @@
+
+
+
+
+
+
+
+
diff --git a/src/lib/components/mcp/AddServerForm.svelte b/src/lib/components/mcp/AddServerForm.svelte
new file mode 100644
index 0000000000000000000000000000000000000000..96a389b5202193fc8544e944481499974adca74b
--- /dev/null
+++ b/src/lib/components/mcp/AddServerForm.svelte
@@ -0,0 +1,250 @@
+
+
+
+
+
+
+ Server Name *
+
+
+
+
+
+
+
+ Server URL *
+
+
+
+
+
+
+
+
+ HTTP Headers (Optional)
+
+
+ {#if headers.length === 0}
+
No headers configured
+ {:else}
+ {#each headers as header, i}
+
+ {/each}
+ {/if}
+
+
+
+ Add Header
+
+
+
+ Common examples:
+ • Bearer token:
+ Authorization: Bearer YOUR_TOKEN
+ • API key:
+ X-API-Key: YOUR_KEY
+
+
+
+
+
+
+
+
+
+
Be careful with custom MCP servers.
+
+ They receive your requests (including conversation context and any headers you add) and
+ can run powerful tools on your behalf. Only add servers you trust and review their source.
+ Never share confidential information.
+
+
+
+
+
+
+ {#if error}
+
+ {/if}
+
+
+
+
+ Cancel
+
+
+ {submitLabel}
+
+
+
diff --git a/src/lib/components/mcp/MCPServerManager.svelte b/src/lib/components/mcp/MCPServerManager.svelte
new file mode 100644
index 0000000000000000000000000000000000000000..16eaaa66615542ce86df0a09efd1e7258142f6d4
--- /dev/null
+++ b/src/lib/components/mcp/MCPServerManager.svelte
@@ -0,0 +1,185 @@
+
+
+
+
+
+
+
+ {#if currentView === "list"}
+ MCP Servers
+ {:else}
+ Add MCP server
+ {/if}
+
+
+ {#if currentView === "list"}
+ Manage MCP servers to extend {publicConfig.PUBLIC_APP_NAME} with external tools.
+ {:else}
+ Add a custom MCP server to {publicConfig.PUBLIC_APP_NAME}.
+ {/if}
+
+
+
+
+ {#if currentView === "list"}
+
+
+
+
+
+
+
+ {$allMcpServers.length}
+ {$allMcpServers.length === 1 ? "server" : "servers"} configured
+
+
+ {enabledCount} enabled
+
+
+
+
+
+
+
+ {isRefreshing ? "Refreshing…" : "Refresh"}
+
+ (currentView = "add")}
+ class="btn flex items-center gap-0.5 rounded-lg bg-blue-600 py-1.5 pl-2 pr-3 text-sm font-medium text-white hover:bg-blue-600"
+ >
+
+ Add Server
+
+
+
+
+
+ {#if baseServers.length > 0}
+
+
+ Base Servers ({baseServers.length})
+
+
+ {#each baseServers as server (server.id)}
+
+ {/each}
+
+
+ {/if}
+
+
+
+
+ Custom Servers ({customServers.length})
+
+ {#if customServers.length === 0}
+
+
+
+ No custom servers yet
+
+
+ Add your own MCP servers with custom tools
+
+
(currentView = "add")}
+ class="flex items-center gap-1.5 rounded-lg bg-blue-600 px-4 py-2 text-sm font-medium text-white hover:bg-blue-600"
+ >
+
+ Add Your First Server
+
+
+ {:else}
+
+ {#each customServers as server (server.id)}
+
+ {/each}
+
+ {/if}
+
+
+
+
+
💡 Quick Tips
+
+ • Only connect to servers you trust
+ • Enable servers to make their tools available in chat
+ • Use the Health Check button to verify server connectivity
+ • You can add HTTP headers for authentication when required
+
+
+
+ {:else if currentView === "add"}
+
+ {/if}
+
+
diff --git a/src/lib/components/mcp/ServerCard.svelte b/src/lib/components/mcp/ServerCard.svelte
new file mode 100644
index 0000000000000000000000000000000000000000..ca3e92e1bf1c1fa7ab5aafe64f62d48d39279742
--- /dev/null
+++ b/src/lib/components/mcp/ServerCard.svelte
@@ -0,0 +1,203 @@
+
+
+
+
+
+
+
+
+
+
+ {server.name}
+
+
+
+ {server.url}
+
+
+
+
+
isSelected, setEnabled} />
+
+
+
+ {#if server.status}
+
+
+ {#if server.status === "connected"}
+
+ {:else if server.status === "connecting"}
+
+ {:else if server.status === "error"}
+
+ {:else}
+
+ {/if}
+ {statusInfo.label}
+
+
+ {#if server.tools && server.tools.length > 0}
+
+
+ {server.tools.length}
+ {server.tools.length === 1 ? "tool" : "tools"}
+
+ {/if}
+
+ {/if}
+
+
+ {#if server.errorMessage}
+
+
+ {server.errorMessage}
+
+
+ {/if}
+
+
+
+
+
+ Health Check
+
+
+ {#if isHfMcp}
+
+
+ Settings
+
+ {/if}
+
+ {#if server.type === "custom"}
+
+
+ Delete
+
+ {/if}
+
+
+
+ {#if server.tools && server.tools.length > 0}
+
+
+ Available Tools ({server.tools.length})
+
+
+ {#each server.tools as tool}
+
+ {tool.name}
+ {#if tool.description}
+ - {tool.description}
+ {/if}
+
+ {/each}
+
+
+ {/if}
+
+
diff --git a/src/lib/server/api/routes/groups/models.ts b/src/lib/server/api/routes/groups/models.ts
index 87e2573807c506712394cd612702c5a83ffc916c..5949c6a363459e8b129f64e59f337f054a7c1269 100644
--- a/src/lib/server/api/routes/groups/models.ts
+++ b/src/lib/server/api/routes/groups/models.ts
@@ -21,6 +21,7 @@ export type GETModelsResponse = Array<{
preprompt?: string;
multimodal: boolean;
multimodalAcceptedMimetypes?: string[];
+ supportsTools?: boolean;
unlisted: boolean;
hasInferenceAPI: boolean;
// Mark router entry for UI decoration — always present
@@ -59,6 +60,7 @@ export const modelGroup = new Elysia().group("/models", (app) =>
preprompt: model.preprompt,
multimodal: model.multimodal,
multimodalAcceptedMimetypes: model.multimodalAcceptedMimetypes,
+ supportsTools: (model as unknown as { supportsTools?: boolean }).supportsTools ?? false,
unlisted: model.unlisted,
hasInferenceAPI: model.hasInferenceAPI,
isRouter: model.isRouter,
diff --git a/src/lib/server/api/routes/groups/user.ts b/src/lib/server/api/routes/groups/user.ts
index 21eadca78a36d927d36870da7a0d63f2aed8c450..0ef5b1d6e8cfb70082b6451a13e97cebd04f0a93 100644
--- a/src/lib/server/api/routes/groups/user.ts
+++ b/src/lib/server/api/routes/groups/user.ts
@@ -71,6 +71,7 @@ export const userGroup = new Elysia()
customPrompts: settings?.customPrompts ?? {},
multimodalOverrides: settings?.multimodalOverrides ?? {},
+ toolsOverrides: settings?.toolsOverrides ?? {},
};
})
.post("/settings", async ({ locals, request }) => {
@@ -85,14 +86,13 @@ export const userGroup = new Elysia()
activeModel: z.string().default(DEFAULT_SETTINGS.activeModel),
customPrompts: z.record(z.string()).default({}),
multimodalOverrides: z.record(z.boolean()).default({}),
+ toolsOverrides: z.record(z.boolean()).default({}),
disableStream: z.boolean().default(false),
directPaste: z.boolean().default(false),
hidePromptExamples: z.record(z.boolean()).default({}),
})
.parse(body) satisfies SettingsEditable;
- // Tools removed: ignore tools updates
-
await collections.settings.updateOne(
authCondition(locals),
{
diff --git a/src/lib/server/config.ts b/src/lib/server/config.ts
index 0ca6d522629f2177298c49a778d936c4bf39178d..8a597dd1c98cf80d09f95ad023acff309a93c78e 100644
--- a/src/lib/server/config.ts
+++ b/src/lib/server/config.ts
@@ -156,7 +156,9 @@ type ExtraConfigKeys =
| "OLD_MODELS"
| "ENABLE_ASSISTANTS"
| "METRICS_ENABLED"
- | "METRICS_PORT";
+ | "METRICS_PORT"
+ | "MCP_SERVERS"
+ | "MCP_FORWARD_HF_USER_TOKEN";
type ConfigProxy = ConfigManager & { [K in ConfigKey | ExtraConfigKeys]: string };
diff --git a/src/lib/server/endpoints/openai/endpointOai.ts b/src/lib/server/endpoints/openai/endpointOai.ts
index 259606ac9f14fcc92a17e30899a39e78f4fce6a1..ae79a31f36bda17ccc444fafc5f75e3ebfaebbad 100644
--- a/src/lib/server/endpoints/openai/endpointOai.ts
+++ b/src/lib/server/endpoints/openai/endpointOai.ts
@@ -171,12 +171,10 @@ export async function endpointOai(
await prepareMessages(messages, imageProcessor, isMultimodal ?? model.multimodal);
// Normalize preprompt and handle empty values
- const normalizedPreprompt =
- typeof preprompt === "string" ? preprompt.trim() : "";
+ const normalizedPreprompt = typeof preprompt === "string" ? preprompt.trim() : "";
- // Check if a system message already exists as the first message
- const hasSystemMessage =
- messagesOpenAI.length > 0 && messagesOpenAI[0]?.role === "system";
+ // Check if a system message already exists as the first message
+ const hasSystemMessage = messagesOpenAI.length > 0 && messagesOpenAI[0]?.role === "system";
if (hasSystemMessage) {
// Prepend normalized preprompt to existing system content when non-empty
@@ -188,15 +186,12 @@ export async function endpointOai(
messagesOpenAI[0].content =
normalizedPreprompt + (userSystemPrompt ? "\n\n" + userSystemPrompt : "");
}
- } else {
- // Insert a system message only if the preprompt is non-empty
- if (normalizedPreprompt) {
- messagesOpenAI = [
- { role: "system", content: normalizedPreprompt },
- ...messagesOpenAI,
- ];
- }
+ } else {
+ // Insert a system message only if the preprompt is non-empty
+ if (normalizedPreprompt) {
+ messagesOpenAI = [{ role: "system", content: normalizedPreprompt }, ...messagesOpenAI];
}
+ }
// Combine model defaults with request-specific parameters
const parameters = { ...model.parameters, ...generateSettings };
diff --git a/src/lib/server/endpoints/preprocessMessages.ts b/src/lib/server/endpoints/preprocessMessages.ts
index a8fdb32bf222be050e4f67a8f3de6d28e62ac24b..98e7955582ca16e325b926749adc8d327e3ea599 100644
--- a/src/lib/server/endpoints/preprocessMessages.ts
+++ b/src/lib/server/endpoints/preprocessMessages.ts
@@ -4,13 +4,13 @@ import { downloadFile } from "../files/downloadFile";
import type { ObjectId } from "mongodb";
export async function preprocessMessages(
- messages: Message[],
- convId: ObjectId
+ messages: Message[],
+ convId: ObjectId
): Promise {
- return Promise.resolve(messages)
- .then((msgs) => downloadFiles(msgs, convId))
- .then((msgs) => injectClipboardFiles(msgs))
- .then(stripEmptyInitialSystemMessage);
+ return Promise.resolve(messages)
+ .then((msgs) => downloadFiles(msgs, convId))
+ .then((msgs) => injectClipboardFiles(msgs))
+ .then(stripEmptyInitialSystemMessage);
}
async function downloadFiles(messages: Message[], convId: ObjectId): Promise {
@@ -24,8 +24,8 @@ async function downloadFiles(messages: Message[], convId: ObjectId): Promise {
+ return Promise.all(
+ messages.map((message) => {
const plaintextFiles = message.files
?.filter((file) => file.mime === "application/vnd.chatui.clipboard")
.map((file) => Buffer.from(file.value, "base64").toString("utf-8"));
@@ -37,8 +37,8 @@ async function injectClipboardFiles(messages: EndpointMessage[]) {
content: `${plaintextFiles.join("\n\n")}\n\n${message.content}`,
files: message.files?.filter((file) => file.mime !== "application/vnd.chatui.clipboard"),
};
- })
- );
+ })
+ );
}
/**
@@ -46,17 +46,16 @@ async function injectClipboardFiles(messages: EndpointMessage[]) {
* This prevents sending an empty system prompt to any provider.
*/
function stripEmptyInitialSystemMessage(messages: EndpointMessage[]): EndpointMessage[] {
- if (!messages?.length) return messages;
- const first = messages[0];
- if (first?.from !== "system") return messages;
+ if (!messages?.length) return messages;
+ const first = messages[0];
+ if (first?.from !== "system") return messages;
- const content = first?.content as unknown;
- const isEmpty =
- typeof content === "string" ? content.trim().length === 0 : false;
+ const content = first?.content as unknown;
+ const isEmpty = typeof content === "string" ? content.trim().length === 0 : false;
- if (isEmpty) {
- return messages.slice(1);
- }
+ if (isEmpty) {
+ return messages.slice(1);
+ }
- return messages;
+ return messages;
}
diff --git a/src/lib/server/mcp/clientPool.ts b/src/lib/server/mcp/clientPool.ts
new file mode 100644
index 0000000000000000000000000000000000000000..ff48354fb5c1dd9d90523327cee04434c4ee5a1d
--- /dev/null
+++ b/src/lib/server/mcp/clientPool.ts
@@ -0,0 +1,48 @@
+import { Client } from "@modelcontextprotocol/sdk/client";
+import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
+import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js";
+import type { McpServerConfig } from "./httpClient";
+
+const pool = new Map();
+
+function keyOf(server: McpServerConfig) {
+ const headers = Object.entries(server.headers ?? {})
+ .sort(([a], [b]) => a.localeCompare(b))
+ .map(([k, v]) => `${k}:${v}`)
+ .join("|\u0000|");
+ return `${server.url}|${headers}`;
+}
+
+export async function getClient(server: McpServerConfig, signal?: AbortSignal): Promise {
+ const key = keyOf(server);
+ const existing = pool.get(key);
+ if (existing) return existing;
+
+ const client = new Client({ name: "chat-ui-mcp", version: "0.1.0" });
+ const url = new URL(server.url);
+ const requestInit: RequestInit = { headers: server.headers, signal };
+ try {
+ try {
+ await client.connect(new StreamableHTTPClientTransport(url, { requestInit }));
+ } catch {
+ await client.connect(new SSEClientTransport(url, { requestInit }));
+ }
+ } catch (err) {
+ try {
+ await client.close?.();
+ } catch {}
+ throw err;
+ }
+
+ pool.set(key, client);
+ return client;
+}
+
/**
 * Gracefully shuts down the client pool: closes each pooled client in turn
 * (swallowing individual close failures) and removes it from the pool only
 * after its close attempt, so the pool always reflects remaining work.
 */
export async function drainPool() {
	for (const [key, client] of pool) {
		try {
			await client.close?.();
		} catch {/* best-effort shutdown: ignore close errors */}
		pool.delete(key);
	}
}
diff --git a/src/lib/server/mcp/hf.ts b/src/lib/server/mcp/hf.ts
new file mode 100644
index 0000000000000000000000000000000000000000..315724106a0f3d31458d4493e0d9b917cd45f4d3
--- /dev/null
+++ b/src/lib/server/mcp/hf.ts
@@ -0,0 +1,21 @@
+// Minimal shared helpers for HF MCP token forwarding
+
+export const hasAuthHeader = (h?: Record) =>
+ !!h && Object.keys(h).some((k) => k.toLowerCase() === "authorization");
+
+export const isStrictHfMcpLogin = (urlString: string) => {
+ try {
+ const u = new URL(urlString);
+ return (
+ u.protocol === "https:" &&
+ u.hostname === "huggingface.co" &&
+ u.pathname === "/mcp" &&
+ u.search === "?login"
+ );
+ } catch {
+ return false;
+ }
+};
+
+export const hasNonEmptyToken = (tok: unknown): tok is string =>
+ typeof tok === "string" && tok.trim().length > 0;
diff --git a/src/lib/server/mcp/httpClient.ts b/src/lib/server/mcp/httpClient.ts
new file mode 100644
index 0000000000000000000000000000000000000000..abd5600baf2489ae61626087bf95720af3d6a4f1
--- /dev/null
+++ b/src/lib/server/mcp/httpClient.ts
@@ -0,0 +1,61 @@
+import { Client } from "@modelcontextprotocol/sdk/client";
+import { getClient } from "./clientPool";
+
+export interface McpServerConfig {
+ name: string;
+ url: string;
+ headers?: Record;
+}
+
+const DEFAULT_TIMEOUT_MS = 30_000;
+
+export type McpToolTextResponse = {
+ text: string;
+ /** If the server returned structuredContent, include it raw */
+ structured?: unknown;
+ /** Raw content blocks returned by the server, if any */
+ content?: unknown[];
+};
+
+export async function callMcpTool(
+ server: McpServerConfig,
+ tool: string,
+ args: unknown = {},
+ {
+ timeoutMs = DEFAULT_TIMEOUT_MS,
+ signal,
+ client,
+ }: { timeoutMs?: number; signal?: AbortSignal; client?: Client } = {}
+): Promise {
+ const normalizedArgs =
+ typeof args === "object" && args !== null && !Array.isArray(args)
+ ? (args as Record)
+ : undefined;
+
+ // Get a (possibly pooled) client. The client itself was connected with a signal
+ // that already composes outer cancellation. We still enforce a per-call timeout here.
+ const activeClient = client ?? (await getClient(server, signal));
+
+ // Prefer the SDK's built-in request controls (timeout, signal)
+ const response = await activeClient.callTool(
+ { name: tool, arguments: normalizedArgs },
+ undefined,
+ { signal, timeout: timeoutMs }
+ );
+
+ const parts = Array.isArray(response?.content) ? (response.content as Array) : [];
+ const textParts = parts
+ .filter((part): part is { type: "text"; text: string } => {
+ if (typeof part !== "object" || part === null) return false;
+ const obj = part as Record;
+ return obj["type"] === "text" && typeof obj["text"] === "string";
+ })
+ .map((p) => p.text);
+
+ const text = textParts.join("\n");
+ const structured = (response as unknown as { structuredContent?: unknown })?.structuredContent;
+ const contentBlocks = Array.isArray(response?.content)
+ ? (response.content as unknown[])
+ : undefined;
+ return { text, structured, content: contentBlocks };
+}
diff --git a/src/lib/server/mcp/registry.ts b/src/lib/server/mcp/registry.ts
new file mode 100644
index 0000000000000000000000000000000000000000..73e44abb5bd5c188ed667905129226b2cf665c2d
--- /dev/null
+++ b/src/lib/server/mcp/registry.ts
@@ -0,0 +1,76 @@
+import { config } from "$lib/server/config";
+import { logger } from "$lib/server/logger";
+import type { McpServerConfig } from "./httpClient";
+import { resetMcpToolsCache } from "./tools";
+
+let cachedRaw: string | null = null;
+let cachedServers: McpServerConfig[] = [];
+
+function parseServers(raw: string): McpServerConfig[] {
+ if (!raw) return [];
+
+ try {
+ const parsed = JSON.parse(raw);
+ if (!Array.isArray(parsed)) return [];
+
+ return parsed
+ .map((entry) => {
+ if (!entry || typeof entry !== "object") return undefined;
+ const name = (entry as Record).name;
+ const url = (entry as Record).url;
+ if (typeof name !== "string" || !name.trim()) return undefined;
+ if (typeof url !== "string" || !url.trim()) return undefined;
+
+ const headersRaw = (entry as Record).headers;
+ let headers: Record | undefined;
+ if (headersRaw && typeof headersRaw === "object" && !Array.isArray(headersRaw)) {
+ const headerEntries = Object.entries(headersRaw as Record).filter(
+ (entry): entry is [string, string] => typeof entry[1] === "string"
+ );
+ headers = Object.fromEntries(headerEntries);
+ }
+
+ return headers ? { name, url, headers } : { name, url };
+ })
+ .filter((server): server is McpServerConfig => Boolean(server));
+ } catch (error) {
+ logger.warn({ err: error }, "[mcp] failed to parse MCP_SERVERS env");
+ return [];
+ }
+}
+
+function setServers(raw: string) {
+ cachedServers = parseServers(raw);
+ cachedRaw = raw;
+ resetMcpToolsCache();
+ logger.debug({ count: cachedServers.length }, "[mcp] loaded server configuration");
+ console.log(
+ `[MCP] Loaded ${cachedServers.length} server(s):`,
+ cachedServers.map((s) => s.name).join(", ") || "none"
+ );
+}
+
+export function loadMcpServersOnStartup(): McpServerConfig[] {
+ const raw = config.MCP_SERVERS || "[]";
+ setServers(raw);
+ return cachedServers;
+}
+
+export function refreshMcpServersIfChanged(): void {
+ const currentRaw = config.MCP_SERVERS || "[]";
+ if (cachedRaw === null) {
+ setServers(currentRaw);
+ return;
+ }
+
+ if (currentRaw !== cachedRaw) {
+ setServers(currentRaw);
+ }
+}
+
+export function getMcpServers(): McpServerConfig[] {
+ if (cachedRaw === null) {
+ loadMcpServersOnStartup();
+ }
+ return cachedServers;
+}
diff --git a/src/lib/server/mcp/tools.ts b/src/lib/server/mcp/tools.ts
new file mode 100644
index 0000000000000000000000000000000000000000..e64f822b1d1224f14ff5fb3795af56190d6159a0
--- /dev/null
+++ b/src/lib/server/mcp/tools.ts
@@ -0,0 +1,182 @@
+import { Client } from "@modelcontextprotocol/sdk/client";
+import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
+import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js";
+import type { McpServerConfig } from "./httpClient";
+
+export type OpenAiTool = {
+ type: "function";
+ function: { name: string; description?: string; parameters?: Record };
+};
+
+export interface McpToolMapping {
+ fnName: string;
+ server: string;
+ tool: string;
+}
+
+interface CacheEntry {
+ fetchedAt: number;
+ ttlMs: number;
+ tools: OpenAiTool[];
+ mapping: Record;
+}
+
+const DEFAULT_TTL_MS = 60_000;
+const cache = new Map();
+
+// Per OpenAI tool/function name guidelines most providers enforce:
+// ^[a-zA-Z0-9_-]{1,64}$
+// Dots are not universally accepted (e.g., MiniMax via HF router rejects them).
+// Normalize any disallowed characters (including ".") to underscore and trim to 64 chars.
+function sanitizeName(name: string) {
+ return name.replace(/[^a-zA-Z0-9_-]/g, "_").slice(0, 64);
+}
+
+function buildCacheKey(servers: McpServerConfig[]): string {
+ const normalized = servers
+ .map((server) => ({
+ name: server.name,
+ url: server.url,
+ headers: server.headers
+ ? Object.entries(server.headers)
+ .sort(([a], [b]) => a.localeCompare(b))
+ .map(([key, value]) => [key, value])
+ : [],
+ }))
+ .sort((a, b) => {
+ const byName = a.name.localeCompare(b.name);
+ if (byName !== 0) return byName;
+ return a.url.localeCompare(b.url);
+ });
+
+ return JSON.stringify(normalized);
+}
+
+type ListedTool = {
+ name?: string;
+ inputSchema?: Record;
+ description?: string;
+ annotations?: { title?: string };
+};
+
+async function listServerTools(
+ server: McpServerConfig,
+ opts: { signal?: AbortSignal } = {}
+): Promise {
+ const url = new URL(server.url);
+ const client = new Client({ name: "chat-ui-mcp", version: "0.1.0" });
+ try {
+ try {
+ const transport = new StreamableHTTPClientTransport(url, {
+ requestInit: { headers: server.headers, signal: opts.signal },
+ });
+ await client.connect(transport);
+ } catch {
+ const transport = new SSEClientTransport(url, {
+ requestInit: { headers: server.headers, signal: opts.signal },
+ });
+ await client.connect(transport);
+ }
+
+ const response = await client.listTools({});
+ return Array.isArray(response?.tools) ? (response.tools as ListedTool[]) : [];
+ } finally {
+ try {
+ await client.close?.();
+ } catch {
+ // ignore close errors
+ }
+ }
+}
+
+export async function getOpenAiToolsForMcp(
+ servers: McpServerConfig[],
+ { ttlMs = DEFAULT_TTL_MS, signal }: { ttlMs?: number; signal?: AbortSignal } = {}
+): Promise<{ tools: OpenAiTool[]; mapping: Record }> {
+ const now = Date.now();
+ const cacheKey = buildCacheKey(servers);
+ const cached = cache.get(cacheKey);
+ if (cached && now - cached.fetchedAt < cached.ttlMs) {
+ return { tools: cached.tools, mapping: cached.mapping };
+ }
+
+ const tools: OpenAiTool[] = [];
+ const mapping: Record = {};
+
+ const seenNames = new Set();
+
+ const pushToolDefinition = (
+ name: string,
+ description: string | undefined,
+ parameters: Record | undefined
+ ) => {
+ if (seenNames.has(name)) return;
+ tools.push({
+ type: "function",
+ function: {
+ name,
+ description,
+ parameters,
+ },
+ });
+ seenNames.add(name);
+ };
+
+ // Fetch tools in parallel; tolerate individual failures
+ const tasks = servers.map((server) => listServerTools(server, { signal }));
+ const results = await Promise.allSettled(tasks);
+
+ for (let i = 0; i < results.length; i++) {
+ const server = servers[i];
+ const r = results[i];
+ if (r.status === "fulfilled") {
+ const serverTools = r.value;
+ for (const tool of serverTools) {
+ if (typeof tool.name !== "string" || tool.name.trim().length === 0) {
+ continue;
+ }
+
+ const parameters =
+ tool.inputSchema && typeof tool.inputSchema === "object" ? tool.inputSchema : undefined;
+ const description = tool.description ?? tool.annotations?.title;
+ const toolName = tool.name;
+
+ // Emit a collision-aware function name.
+ // Prefer the plain tool name; on conflict, suffix with server name.
+ let plainName = sanitizeName(toolName);
+ if (plainName in mapping) {
+ const suffix = sanitizeName(server.name);
+ const candidate = `${plainName}_${suffix}`.slice(0, 64);
+ if (!(candidate in mapping)) {
+ plainName = candidate;
+ } else {
+ let i = 2;
+ let next = `${candidate}_${i}`;
+ while (i < 10 && next in mapping) {
+ i += 1;
+ next = `${candidate}_${i}`;
+ }
+ plainName = next.slice(0, 64);
+ }
+ }
+
+ pushToolDefinition(plainName, description, parameters);
+ mapping[plainName] = {
+ fnName: plainName,
+ server: server.name,
+ tool: toolName,
+ };
+ }
+ } else {
+ // ignore failure for this server
+ continue;
+ }
+ }
+
+ cache.set(cacheKey, { fetchedAt: now, ttlMs, tools, mapping });
+ return { tools, mapping };
+}
+
/** Clears all cached tool listings (e.g. after the server config changes). */
export function resetMcpToolsCache() {
	cache.clear();
}
diff --git a/src/lib/server/models.ts b/src/lib/server/models.ts
index ba1d70ed1af35d57516593e6d56c2232ab81c0d7..e8dccc2c3ef4e49dc0eeef4cdcc95dd06095fbdc 100644
--- a/src/lib/server/models.ts
+++ b/src/lib/server/models.ts
@@ -56,6 +56,8 @@ const modelConfig = z.object({
.optional(),
multimodal: z.boolean().default(false),
multimodalAcceptedMimetypes: z.array(z.string()).optional(),
+ // Aggregated tool-calling capability across providers (HF router)
+ supportsTools: z.boolean().default(false),
unlisted: z.boolean().default(false),
embeddingModel: z.never().optional(),
/** Used to enable/disable system prompt usage */
@@ -234,6 +236,7 @@ const signatureForModel = (model: ProcessedModel) =>
}) ?? null,
multimodal: model.multimodal,
multimodalAcceptedMimetypes: model.multimodalAcceptedMimetypes,
+ supportsTools: (model as unknown as { supportsTools?: boolean }).supportsTools ?? false,
isRouter: model.isRouter,
hasInferenceAPI: model.hasInferenceAPI,
});
@@ -341,6 +344,9 @@ const buildModels = async (): Promise => {
);
const supportsImageInput =
inputModalities.includes("image") || inputModalities.includes("vision");
+
+ // If any provider supports tools, consider the model as supporting tools
+ const supportsTools = Boolean((m.providers ?? []).some((p) => p?.supports_tools === true));
return {
id: m.id,
name: m.id,
@@ -350,6 +356,7 @@ const buildModels = async (): Promise => {
providers: m.providers,
multimodal: supportsImageInput,
multimodalAcceptedMimetypes: supportsImageInput ? ["image/*"] : undefined,
+ supportsTools,
endpoints: [
{
type: "openai" as const,
@@ -405,6 +412,7 @@ const buildModels = async (): Promise => {
const routerAliasId = (config.PUBLIC_LLM_ROUTER_ALIAS_ID || "omni").trim() || "omni";
const routerMultimodalEnabled =
(config.LLM_ROUTER_ENABLE_MULTIMODAL || "").toLowerCase() === "true";
+ const routerToolsEnabled = (config.LLM_ROUTER_ENABLE_TOOLS || "").toLowerCase() === "true";
let decorated = builtModels as ProcessedModel[];
@@ -432,6 +440,10 @@ const buildModels = async (): Promise => {
aliasRaw.multimodalAcceptedMimetypes = ["image/*"];
}
+ if (routerToolsEnabled) {
+ aliasRaw.supportsTools = true;
+ }
+
const aliasBase = await processModel(aliasRaw);
// Create a self-referential ProcessedModel for the router endpoint
const aliasModel: ProcessedModel = {
diff --git a/src/lib/server/router/endpoint.ts b/src/lib/server/router/endpoint.ts
index 54ef59a8471a0adc1aa92ecadd10edb628421c2d..499e4cd3fb7a6d36e544f8b3f5c51c68dfecab95 100644
--- a/src/lib/server/router/endpoint.ts
+++ b/src/lib/server/router/endpoint.ts
@@ -12,6 +12,12 @@ import { archSelectRoute } from "./arch";
import { getRoutes, resolveRouteModels } from "./policy";
import { getApiToken } from "$lib/server/apiToken";
import { ROUTER_FAILURE } from "./types";
+import {
+ hasActiveToolsSelection,
+ isRouterToolsBypassEnabled,
+ pickToolsCapableModel,
+ ROUTER_TOOLS_ROUTE,
+} from "./toolsRoute";
const REASONING_BLOCK_REGEX = /[\s\S]*?(?:<\/think>|$)/g;
@@ -115,11 +121,14 @@ export async function makeRouterEndpoint(routerModel: ProcessedModel): Promise
(message.files ?? []).some(
(file) => typeof file?.mime === "string" && file.mime.startsWith("image/")
)
);
+ // Tools are considered "active" if the client indicated any enabled MCP server
+ const hasToolsActive = hasActiveToolsSelection(params.locals);
// Helper to create an OpenAI endpoint for a specific candidate model id
async function createCandidateEndpoint(candidateModelId: string): Promise {
@@ -230,6 +239,46 @@ export async function makeRouterEndpoint(routerModel: ProcessedModel): Promise {
+ try {
+ const all = await getModels();
+ return pickToolsCapableModel(all);
+ } catch (e) {
+ logger.warn({ err: String(e) }, "[router] failed to load models for tools lookup");
+ return undefined;
+ }
+ }
+
+ if (routerToolsEnabled && hasToolsActive) {
+ const toolsModel = await findToolsCandidateModel();
+ const toolsCandidate = toolsModel?.id ?? toolsModel?.name;
+ if (!toolsCandidate) {
+ // No tool-capable model found — continue with normal routing instead of hard failing
+ } else {
+ try {
+ logger.info(
+ { route: ROUTER_TOOLS_ROUTE, model: toolsCandidate },
+ "[router] tools active; bypassing Arch selection"
+ );
+ const ep = await createCandidateEndpoint(toolsCandidate);
+ const gen = await ep({ ...params });
+ return metadataThenStream(gen, toolsCandidate, ROUTER_TOOLS_ROUTE);
+ } catch (e) {
+ const { message, statusCode } = extractUpstreamError(e);
+ logger.error(
+ {
+ route: ROUTER_TOOLS_ROUTE,
+ model: toolsCandidate,
+ err: message,
+ ...(statusCode && { status: statusCode }),
+ },
+ "[router] tools fallback failed"
+ );
+ throw statusCode ? new HTTPError(message, statusCode) : new Error(message);
+ }
+ }
+ }
+
const routeSelection = await archSelectRoute(sanitizedMessages, undefined, params.locals);
// If arch router failed with an error, only hard-fail for policy errors (402/401/403)
diff --git a/src/lib/server/router/toolsRoute.ts b/src/lib/server/router/toolsRoute.ts
new file mode 100644
index 0000000000000000000000000000000000000000..2004859131a9f997b21200f26a925339d739b3a7
--- /dev/null
+++ b/src/lib/server/router/toolsRoute.ts
@@ -0,0 +1,51 @@
+import { config } from "$lib/server/config";
+import { logger } from "$lib/server/logger";
+import type { ProcessedModel } from "../models";
+
+export const ROUTER_TOOLS_ROUTE = "agentic";
+
+type LocalsWithMcp = App.Locals & {
+ mcp?: {
+ selectedServers?: unknown[];
+ selectedServerNames?: unknown[];
+ };
+};
+
+export function isRouterToolsBypassEnabled(): boolean {
+ return (config.LLM_ROUTER_ENABLE_TOOLS || "").toLowerCase() === "true";
+}
+
+export function hasActiveToolsSelection(locals: App.Locals | undefined): boolean {
+ try {
+ const reqMcp = (locals as LocalsWithMcp | undefined)?.mcp;
+ const byConfig =
+ Array.isArray(reqMcp?.selectedServers) && (reqMcp?.selectedServers?.length ?? 0) > 0;
+ const byName =
+ Array.isArray(reqMcp?.selectedServerNames) && (reqMcp?.selectedServerNames?.length ?? 0) > 0;
+ return Boolean(byConfig || byName);
+ } catch {
+ return false;
+ }
+}
+
+export function pickToolsCapableModel(
+ models: ProcessedModel[] | undefined
+): ProcessedModel | undefined {
+  const preferredRaw = (config as unknown as Record<string, string | undefined>).LLM_ROUTER_TOOLS_MODEL;
+ const preferred = preferredRaw?.trim();
+ if (!preferred) {
+ logger.warn("[router] tools bypass requested but LLM_ROUTER_TOOLS_MODEL is not set");
+ return undefined;
+ }
+ if (!models?.length) return undefined;
+ const found = models.find((m) => m.id === preferred || m.name === preferred);
+ if (!found) {
+ logger.warn(
+ { configuredModel: preferred },
+ "[router] configured tools model not found; falling back to Arch routing"
+ );
+ return undefined;
+ }
+ logger.info({ model: found.id ?? found.name }, "[router] using configured tools model");
+ return found;
+}
diff --git a/src/lib/server/textGeneration/generate.ts b/src/lib/server/textGeneration/generate.ts
index ae9c608c7c22e0c066bad695e09e3280c8b64752..c0e7cb88c65147aa5cdcb5c7431828dcab6813af 100644
--- a/src/lib/server/textGeneration/generate.ts
+++ b/src/lib/server/textGeneration/generate.ts
@@ -1,7 +1,14 @@
-import { MessageUpdateType, type MessageUpdate } from "$lib/types/MessageUpdate";
+import { config } from "$lib/server/config";
+import {
+ MessageReasoningUpdateType,
+ MessageUpdateType,
+ type MessageUpdate,
+} from "$lib/types/MessageUpdate";
import { AbortedGenerations } from "../abortedGenerations";
import type { TextGenerationContext } from "./types";
import type { EndpointMessage } from "../endpoints/endpoints";
+import { generateFromDefaultEndpoint } from "../generateFromDefaultEndpoint";
+import { generateSummaryOfReasoning } from "./reasoning";
import { logger } from "../logger";
type GenerateContext = Omit<TextGenerationContext, "messages"> & { messages: EndpointMessage[] };
@@ -20,6 +27,30 @@ export async function* generate(
}: GenerateContext,
preprompt?: string
): AsyncIterable {
+ // Reasoning mode support
+ let reasoning = false;
+ let reasoningBuffer = "";
+ let lastReasoningUpdate = new Date();
+ let status = "";
+ const startTime = new Date();
+ const modelReasoning = Reflect.get(model, "reasoning") as
+ | { type: string; beginToken?: string; endToken?: string; regex?: string }
+ | undefined;
+ if (
+ modelReasoning &&
+ (modelReasoning.type === "regex" ||
+ modelReasoning.type === "summarize" ||
+ (modelReasoning.type === "tokens" && modelReasoning.beginToken === ""))
+ ) {
+ // Starts in reasoning mode and we extract the answer from the reasoning
+ reasoning = true;
+ yield {
+ type: MessageUpdateType.Reasoning,
+ subtype: MessageReasoningUpdateType.Status,
+ status: "Started reasoning...",
+ };
+ }
+
const stream = await endpoint({
messages,
preprompt,
@@ -32,20 +63,24 @@ export async function* generate(
});
for await (const output of stream) {
- // Check if this output contains router metadata
- if (
- "routerMetadata" in output &&
- output.routerMetadata &&
- ((output.routerMetadata.route && output.routerMetadata.model) ||
- output.routerMetadata.provider)
- ) {
- yield {
- type: MessageUpdateType.RouterMetadata,
- route: output.routerMetadata.route || "",
- model: output.routerMetadata.model || "",
- provider: output.routerMetadata.provider,
- };
- continue;
+ // Check if this output contains router metadata. Emit if either:
+ // 1) route+model are present (router models), or
+ // 2) provider-only is present (non-router models exposing x-inference-provider)
+ if ("routerMetadata" in output && output.routerMetadata) {
+ const hasRouteModel = Boolean(output.routerMetadata.route && output.routerMetadata.model);
+ const hasProviderOnly = Boolean(output.routerMetadata.provider);
+ if (hasRouteModel || hasProviderOnly) {
+ yield {
+ type: MessageUpdateType.RouterMetadata,
+ route: output.routerMetadata.route || "",
+ model: output.routerMetadata.model || "",
+ provider:
+ (output.routerMetadata
+ .provider as unknown as import("@huggingface/inference").InferenceProvider) ||
+ undefined,
+ };
+ continue;
+ }
}
// text generation completed
if (output.generated_text) {
@@ -60,19 +95,139 @@ export async function* generate(
text = text.slice(0, text.length - stopToken.length);
}
- yield {
- type: MessageUpdateType.FinalAnswer,
- text,
- interrupted,
- };
+ let finalAnswer = text;
+ if (modelReasoning && modelReasoning.type === "regex" && modelReasoning.regex) {
+ const regex = new RegExp(modelReasoning.regex);
+ finalAnswer = regex.exec(reasoningBuffer)?.[1] ?? text;
+ } else if (modelReasoning && modelReasoning.type === "summarize") {
+ yield {
+ type: MessageUpdateType.Reasoning,
+ subtype: MessageReasoningUpdateType.Status,
+ status: "Summarizing reasoning...",
+ };
+ try {
+ const summary = yield* generateFromDefaultEndpoint({
+ messages: [
+ {
+ from: "user",
+ content: `Question: ${messages[messages.length - 1].content}\n\nReasoning: ${reasoningBuffer}`,
+ },
+ ],
+ preprompt: `Your task is to summarize concisely all your reasoning steps and then give the final answer. Keep it short, one short paragraph at most. If the reasoning steps explicitly include a code solution, make sure to include it in your answer.`,
+ modelId: Reflect.get(model, "id") as string | undefined,
+ locals,
+ });
+ finalAnswer = summary;
+ yield {
+ type: MessageUpdateType.Reasoning,
+ subtype: MessageReasoningUpdateType.Status,
+ status: `Done in ${Math.round((new Date().getTime() - startTime.getTime()) / 1000)}s.`,
+ };
+ } catch (e) {
+ finalAnswer = text;
+ logger.error(e);
+ }
+ } else if (modelReasoning && modelReasoning.type === "tokens") {
+ // Remove the reasoning segment from final answer to avoid duplication
+ const beginIndex = modelReasoning.beginToken
+ ? reasoningBuffer.indexOf(modelReasoning.beginToken)
+ : 0;
+ const endIndex = modelReasoning.endToken
+ ? reasoningBuffer.lastIndexOf(modelReasoning.endToken)
+ : -1;
+
+ if (beginIndex !== -1 && endIndex !== -1 && modelReasoning.endToken) {
+ finalAnswer =
+ text.slice(0, beginIndex) + text.slice(endIndex + modelReasoning.endToken.length);
+ }
+ }
+
+ yield { type: MessageUpdateType.FinalAnswer, text: finalAnswer, interrupted };
continue;
}
+ if (modelReasoning && modelReasoning.type === "tokens") {
+ if (output.token.text === modelReasoning.beginToken) {
+ reasoning = true;
+ reasoningBuffer += output.token.text;
+ continue;
+ } else if (modelReasoning.endToken && output.token.text === modelReasoning.endToken) {
+ reasoning = false;
+ reasoningBuffer += output.token.text;
+ yield {
+ type: MessageUpdateType.Reasoning,
+ subtype: MessageReasoningUpdateType.Status,
+ status: `Done in ${Math.round((new Date().getTime() - startTime.getTime()) / 1000)}s.`,
+ };
+ continue;
+ }
+ }
+
// ignore special tokens
if (output.token.special) continue;
- // yield normal token
- yield { type: MessageUpdateType.Stream, token: output.token.text };
+ // pass down normal token
+ if (reasoning) {
+ reasoningBuffer += output.token.text;
+
+ if (modelReasoning && modelReasoning.type === "tokens" && modelReasoning.endToken) {
+ if (reasoningBuffer.lastIndexOf(modelReasoning.endToken) !== -1) {
+ const endTokenIndex = reasoningBuffer.lastIndexOf(modelReasoning.endToken);
+ const textBuffer = reasoningBuffer.slice(endTokenIndex + modelReasoning.endToken.length);
+ reasoningBuffer = reasoningBuffer.slice(
+ 0,
+ endTokenIndex + modelReasoning.endToken.length + 1
+ );
+
+ yield {
+ type: MessageUpdateType.Reasoning,
+ subtype: MessageReasoningUpdateType.Stream,
+ token: output.token.text,
+ };
+ yield { type: MessageUpdateType.Stream, token: textBuffer };
+ yield {
+ type: MessageUpdateType.Reasoning,
+ subtype: MessageReasoningUpdateType.Status,
+ status: `Done in ${Math.round((new Date().getTime() - startTime.getTime()) / 1000)}s.`,
+ };
+ reasoning = false;
+ continue;
+ }
+ }
+
+ // yield status update if it has changed
+ if (status !== "") {
+ yield {
+ type: MessageUpdateType.Reasoning,
+ subtype: MessageReasoningUpdateType.Status,
+ status,
+ };
+ status = "";
+ }
+
+ // create a new status every ~4s (optional)
+ if (
+ Reflect.get(config, "REASONING_SUMMARY") === "true" &&
+ new Date().getTime() - lastReasoningUpdate.getTime() > 4000
+ ) {
+ lastReasoningUpdate = new Date();
+ try {
+ generateSummaryOfReasoning(reasoningBuffer, model.id, locals).then((summary) => {
+ status = summary;
+ });
+ } catch (e) {
+ logger.error(e);
+ }
+ }
+
+ yield {
+ type: MessageUpdateType.Reasoning,
+ subtype: MessageReasoningUpdateType.Stream,
+ token: output.token.text,
+ };
+ } else {
+ yield { type: MessageUpdateType.Stream, token: output.token.text };
+ }
// abort check
const date = AbortedGenerations.getInstance().getAbortTime(conv._id.toString());
diff --git a/src/lib/server/textGeneration/index.ts b/src/lib/server/textGeneration/index.ts
index c7b7c70a1c7a86bc98a41b511cbfd7bc272ab8b8..0ff9bc4a900f7997db074819187f86564c5b8cb4 100644
--- a/src/lib/server/textGeneration/index.ts
+++ b/src/lib/server/textGeneration/index.ts
@@ -7,6 +7,7 @@ import {
MessageUpdateStatus,
} from "$lib/types/MessageUpdate";
import { generate } from "./generate";
+import { runMcpFlow } from "./mcp/runMcpFlow";
import { mergeAsyncGenerators } from "$lib/utils/mergeAsyncGenerators";
import type { TextGenerationContext } from "./types";
@@ -47,6 +48,34 @@ async function* textGenerationWithoutTitle(
const preprompt = conv.preprompt;
const processedMessages = await preprocessMessages(messages, convId);
- yield* generate({ ...ctx, messages: processedMessages }, preprompt);
+
+ // Try MCP tool flow first; fall back to default generation if not selected/available
+ try {
+ const mcpGen = runMcpFlow({
+ model: ctx.model,
+ conv,
+ messages: processedMessages,
+ assistant: ctx.assistant,
+ forceMultimodal: ctx.forceMultimodal,
+ forceTools: ctx.forceTools,
+ locals: ctx.locals,
+ preprompt,
+ abortSignal: ctx.abortController.signal,
+ });
+
+ let step = await mcpGen.next();
+ while (!step.done) {
+ yield step.value;
+ step = await mcpGen.next();
+ }
+ const didRunMcp = Boolean(step.value);
+ if (!didRunMcp) {
+ // fallback to normal text generation
+ yield* generate({ ...ctx, messages: processedMessages }, preprompt);
+ }
+ } catch {
+ // On any MCP error, fall back to normal generation
+ yield* generate({ ...ctx, messages: processedMessages }, preprompt);
+ }
done.abort();
}
diff --git a/src/lib/server/textGeneration/mcp/routerResolution.ts b/src/lib/server/textGeneration/mcp/routerResolution.ts
new file mode 100644
index 0000000000000000000000000000000000000000..06d7a49d02c38640266df3f2ff3ad1fb07a9eb55
--- /dev/null
+++ b/src/lib/server/textGeneration/mcp/routerResolution.ts
@@ -0,0 +1,105 @@
+import { config } from "$lib/server/config";
+import { archSelectRoute } from "$lib/server/router/arch";
+import { getRoutes, resolveRouteModels } from "$lib/server/router/policy";
+import {
+ hasActiveToolsSelection,
+ isRouterToolsBypassEnabled,
+ pickToolsCapableModel,
+ ROUTER_TOOLS_ROUTE,
+} from "$lib/server/router/toolsRoute";
+import type { EndpointMessage } from "../../endpoints/endpoints";
+import { stripReasoningFromMessageForRouting } from "../utils/routing";
+import type { ProcessedModel } from "../../models";
+import { logger } from "../../logger";
+
+export interface RouterResolutionInput {
+ model: ProcessedModel;
+ messages: EndpointMessage[];
+ conversationId: string;
+ hasImageInput: boolean;
+ locals: App.Locals | undefined;
+}
+
+export interface RouterResolutionResult {
+ runMcp: boolean;
+ targetModel: ProcessedModel;
+ candidateModelId?: string;
+ resolvedRoute?: string;
+}
+
+export async function resolveRouterTarget({
+ model,
+ messages,
+ conversationId,
+ hasImageInput,
+ locals,
+}: RouterResolutionInput): Promise<RouterResolutionResult> {
+ let targetModel = model;
+ let candidateModelId: string | undefined;
+ let resolvedRoute: string | undefined;
+ let runMcp = true;
+
+ if (!model.isRouter) {
+ return { runMcp, targetModel };
+ }
+
+ try {
+ const mod = await import("../../models");
+ const allModels = mod.models as ProcessedModel[];
+
+ if (hasImageInput) {
+ const multimodalCandidate = allModels?.find(
+ (candidate) => !candidate.isRouter && candidate.multimodal
+ );
+ if (multimodalCandidate) {
+ targetModel = multimodalCandidate;
+ candidateModelId = multimodalCandidate.id ?? multimodalCandidate.name;
+ resolvedRoute = "multimodal";
+ } else {
+ runMcp = false;
+ }
+ } else {
+ // If tools are enabled and at least one MCP server is active, prefer a tools-capable model
+ const toolsEnabled = isRouterToolsBypassEnabled();
+ const hasToolsActive = hasActiveToolsSelection(locals);
+
+ if (toolsEnabled && hasToolsActive) {
+ const found = pickToolsCapableModel(allModels);
+ if (found) {
+ targetModel = found;
+ candidateModelId = found.id ?? found.name;
+ resolvedRoute = ROUTER_TOOLS_ROUTE;
+ // Continue; runMcp remains true
+ return { runMcp, targetModel, candidateModelId, resolvedRoute };
+ }
+ // No tools-capable model found; fall back to normal Arch routing below
+ }
+ const routes = await getRoutes();
+ const sanitized = messages.map(stripReasoningFromMessageForRouting);
+ const { routeName } = await archSelectRoute(sanitized, conversationId, locals);
+ resolvedRoute = routeName;
+ const fallbackModel = config.LLM_ROUTER_FALLBACK_MODEL || model.id;
+ const { candidates } = resolveRouteModels(routeName, routes, fallbackModel);
+ const primaryCandidateId = candidates[0];
+ if (!primaryCandidateId || primaryCandidateId === fallbackModel) {
+ runMcp = false;
+ } else {
+ const found = allModels?.find(
+ (candidate) =>
+ candidate.id === primaryCandidateId || candidate.name === primaryCandidateId
+ );
+ if (found) {
+ targetModel = found;
+ candidateModelId = primaryCandidateId;
+ } else {
+ runMcp = false;
+ }
+ }
+ }
+ } catch (error) {
+ logger.warn({ err: String(error) }, "[mcp] routing preflight failed");
+ runMcp = false;
+ }
+
+ return { runMcp, targetModel, candidateModelId, resolvedRoute };
+}
diff --git a/src/lib/server/textGeneration/mcp/runMcpFlow.ts b/src/lib/server/textGeneration/mcp/runMcpFlow.ts
new file mode 100644
index 0000000000000000000000000000000000000000..202d25c1679188a81818262b993a4d6c85cfb8d3
--- /dev/null
+++ b/src/lib/server/textGeneration/mcp/runMcpFlow.ts
@@ -0,0 +1,554 @@
+import { config } from "$lib/server/config";
+import { MessageUpdateType, type MessageUpdate } from "$lib/types/MessageUpdate";
+import type { EndpointMessage } from "../../endpoints/endpoints";
+import { getMcpServers } from "$lib/server/mcp/registry";
+import { isValidUrl } from "$lib/server/urlSafety";
+import { resetMcpToolsCache } from "$lib/server/mcp/tools";
+import { getOpenAiToolsForMcp } from "$lib/server/mcp/tools";
+import type {
+ ChatCompletionChunk,
+ ChatCompletionCreateParamsStreaming,
+ ChatCompletionMessageParam,
+ ChatCompletionContentPart,
+ ChatCompletionMessageToolCall,
+} from "openai/resources/chat/completions";
+import type { Stream } from "openai/streaming";
+import { buildToolPreprompt } from "../utils/toolPrompt";
+import { resolveRouterTarget } from "./routerResolution";
+import { executeToolCalls, type NormalizedToolCall } from "./toolInvocation";
+import { drainPool } from "$lib/server/mcp/clientPool";
+import { logger } from "../../logger";
+import type { TextGenerationContext } from "../types";
+import { hasAuthHeader, isStrictHfMcpLogin, hasNonEmptyToken } from "$lib/server/mcp/hf";
+
+export type RunMcpFlowContext = Pick<
+ TextGenerationContext,
+ "model" | "conv" | "assistant" | "forceMultimodal" | "forceTools" | "locals"
+> & { messages: EndpointMessage[] };
+
+export async function* runMcpFlow({
+ model,
+ conv,
+ messages,
+ assistant,
+ forceMultimodal,
+ forceTools,
+ locals,
+ preprompt,
+ abortSignal,
+}: RunMcpFlowContext & { preprompt?: string; abortSignal?: AbortSignal }): AsyncGenerator<
+ MessageUpdate,
+ boolean,
+ undefined
+> {
+ // Start from env-configured servers
+ let servers = getMcpServers();
+
+ // Merge in request-provided custom servers (if any)
+ try {
+ const reqMcp = (
+ locals as unknown as {
+ mcp?: {
+        selectedServers?: Array<{ name: string; url: string; headers?: Record<string, string> }>;
+ selectedServerNames?: string[];
+ };
+ }
+ )?.mcp;
+ const custom = Array.isArray(reqMcp?.selectedServers) ? reqMcp?.selectedServers : [];
+ if (custom.length > 0) {
+ // Invalidate cached tool list when the set of servers changes at request-time
+ resetMcpToolsCache();
+ // Deduplicate by server name (request takes precedence)
+      const byName = new Map<
+        string,
+        { name: string; url: string; headers?: Record<string, string> }
+      >();
+ for (const s of servers) byName.set(s.name, s);
+ for (const s of custom) byName.set(s.name, s);
+ servers = [...byName.values()];
+ }
+
+ // If the client specified a selection by name, filter to those
+ const names = Array.isArray(reqMcp?.selectedServerNames)
+ ? reqMcp?.selectedServerNames
+ : undefined;
+ if (Array.isArray(names)) {
+ servers = servers.filter((s) => names.includes(s.name));
+ }
+ } catch {
+ // ignore selection merge errors and proceed with env servers
+ }
+
+ // Enforce server-side safety (public HTTPS only, no private ranges)
+ servers = servers.filter((s) => {
+ try {
+ return isValidUrl(s.url);
+ } catch {
+ return false;
+ }
+ });
+ if (servers.length === 0) {
+ logger.warn("[mcp] all selected servers rejected by URL safety guard");
+ return false;
+ }
+
+ // Optionally attach the logged-in user's HF token to the official HF MCP server only.
+ // Never override an explicit Authorization header, and require token to look like an HF token.
+ try {
+ const shouldForward = config.MCP_FORWARD_HF_USER_TOKEN === "true";
+ const userToken =
+ (locals as unknown as { hfAccessToken?: string } | undefined)?.hfAccessToken ??
+ (locals as unknown as { token?: string } | undefined)?.token;
+
+ if (shouldForward && hasNonEmptyToken(userToken)) {
+ servers = servers.map((s) => {
+ try {
+ if (isStrictHfMcpLogin(s.url) && !hasAuthHeader(s.headers)) {
+ return {
+ ...s,
+ headers: { ...(s.headers ?? {}), Authorization: `Bearer ${userToken}` },
+ };
+ }
+ } catch {
+ // ignore URL parse errors and leave server unchanged
+ }
+ return s;
+ });
+ }
+ } catch {
+ // best-effort overlay; continue if anything goes wrong
+ }
+ logger.debug({ count: servers.length }, "[mcp] servers configured");
+ if (servers.length === 0) {
+ return false;
+ }
+
+ // Gate MCP flow based on model tool support (aggregated) with user override
+ try {
+ const supportsTools = Boolean((model as unknown as { supportsTools?: boolean }).supportsTools);
+ const toolsEnabled = Boolean(forceTools) || supportsTools;
+ if (!toolsEnabled) {
+ logger.debug({ model: model.id }, "[mcp] tools disabled for model; skipping MCP flow");
+ return false;
+ }
+ } catch {
+ // If anything goes wrong reading the flag, proceed (previous behavior)
+ }
+
+ const hasImageInput = messages.some((msg) =>
+ (msg.files ?? []).some(
+ (file) => typeof file?.mime === "string" && file.mime.startsWith("image/")
+ )
+ );
+
+ const { runMcp, targetModel, candidateModelId, resolvedRoute } = await resolveRouterTarget({
+ model,
+ messages,
+ conversationId: conv._id.toString(),
+ hasImageInput,
+ locals,
+ });
+
+ if (!runMcp) {
+ logger.debug("[mcp] runMcp=false (router did not select tools path)");
+ return false;
+ }
+
+ const { tools: oaTools, mapping } = await getOpenAiToolsForMcp(servers, { signal: abortSignal });
+ logger.debug({ tools: oaTools.length }, "[mcp] openai tool defs built");
+ if (oaTools.length === 0) {
+ return false;
+ }
+
+ try {
+ const { OpenAI } = await import("openai");
+
+ // Capture provider header (x-inference-provider) from the upstream OpenAI-compatible server.
+ let providerHeader: string | undefined;
+ const captureProviderFetch = async (
+ input: RequestInfo | URL,
+ init?: RequestInit
+    ): Promise<Response> => {
+ const res = await fetch(input, init);
+ const p = res.headers.get("x-inference-provider");
+ if (p && !providerHeader) providerHeader = p;
+ return res;
+ };
+
+ const openai = new OpenAI({
+ apiKey: config.OPENAI_API_KEY || config.HF_TOKEN || "sk-",
+ baseURL: config.OPENAI_BASE_URL,
+ fetch: captureProviderFetch,
+ });
+
+ const mmEnabled = (forceMultimodal ?? false) || targetModel.multimodal;
+ logger.debug({ model: targetModel.id ?? targetModel.name, mmEnabled }, "[mcp] target model");
+ const toOpenAiMessage = (msg: EndpointMessage): ChatCompletionMessageParam => {
+ if (msg.from === "user" && mmEnabled) {
+ const parts: ChatCompletionContentPart[] = [{ type: "text", text: msg.content }];
+ for (const file of msg.files ?? []) {
+ if (typeof file?.mime === "string" && file.mime.startsWith("image/")) {
+ const rawValue = file.value as unknown;
+ let encoded: string;
+ if (typeof rawValue === "string") {
+ encoded = rawValue;
+ } else if (rawValue instanceof Uint8Array) {
+ encoded = Buffer.from(rawValue).toString("base64");
+ } else if (rawValue instanceof ArrayBuffer) {
+ encoded = Buffer.from(rawValue).toString("base64");
+ } else {
+ encoded = String(rawValue ?? "");
+ }
+ const url = encoded.startsWith("data:")
+ ? encoded
+ : `data:${file.mime};base64,${encoded}`;
+ parts.push({ type: "image_url", image_url: { url, detail: "auto" } });
+ }
+ }
+ return { role: msg.from, content: parts };
+ }
+ return { role: msg.from, content: msg.content };
+ };
+
+ let messagesOpenAI: ChatCompletionMessageParam[] = messages.map(toOpenAiMessage);
+ const toolPreprompt = buildToolPreprompt(oaTools);
+ const prepromptPieces: string[] = [];
+ if (toolPreprompt.trim().length > 0) {
+ prepromptPieces.push(toolPreprompt);
+ }
+ if (typeof preprompt === "string" && preprompt.trim().length > 0) {
+ prepromptPieces.push(preprompt);
+ }
+ const mergedPreprompt = prepromptPieces.join("\n\n");
+ const hasSystemMessage = messagesOpenAI.length > 0 && messagesOpenAI[0]?.role === "system";
+ if (hasSystemMessage) {
+ if (mergedPreprompt.length > 0) {
+ const existing = messagesOpenAI[0].content ?? "";
+ const existingText = typeof existing === "string" ? existing : "";
+ messagesOpenAI[0].content = mergedPreprompt + (existingText ? "\n\n" + existingText : "");
+ }
+ } else if (mergedPreprompt.length > 0) {
+ messagesOpenAI = [{ role: "system", content: mergedPreprompt }, ...messagesOpenAI];
+ }
+
+ // Work around servers that reject `system` role
+ if (
+ typeof config.OPENAI_BASE_URL === "string" &&
+ config.OPENAI_BASE_URL.length > 0 &&
+ (config.OPENAI_BASE_URL.includes("hf.space") ||
+ config.OPENAI_BASE_URL.includes("gradio.app")) &&
+ messagesOpenAI[0]?.role === "system"
+ ) {
+ messagesOpenAI[0] = { ...messagesOpenAI[0], role: "user" };
+ }
+
+ const parameters = { ...targetModel.parameters, ...assistant?.generateSettings } as Record<
+ string,
+ unknown
+ >;
+ const maxTokens =
+ (parameters?.max_tokens as number | undefined) ??
+ (parameters?.max_new_tokens as number | undefined) ??
+ (parameters?.max_completion_tokens as number | undefined);
+
+ const stopSequences =
+ typeof parameters?.stop === "string"
+ ? parameters.stop
+ : Array.isArray(parameters?.stop)
+ ? (parameters.stop as string[])
+ : undefined;
+
+    const completionBase: Omit<ChatCompletionCreateParamsStreaming, "messages"> = {
+ model: targetModel.id ?? targetModel.name,
+ stream: true,
+ temperature: typeof parameters?.temperature === "number" ? parameters.temperature : undefined,
+ top_p: typeof parameters?.top_p === "number" ? parameters.top_p : undefined,
+ frequency_penalty:
+ typeof parameters?.frequency_penalty === "number"
+ ? parameters.frequency_penalty
+ : typeof parameters?.repetition_penalty === "number"
+ ? parameters.repetition_penalty
+ : undefined,
+ presence_penalty:
+ typeof parameters?.presence_penalty === "number" ? parameters.presence_penalty : undefined,
+ stop: stopSequences,
+ max_tokens: typeof maxTokens === "number" ? maxTokens : undefined,
+ tools: oaTools,
+ tool_choice: "auto",
+ };
+
+ const toPrimitive = (value: unknown) => {
+ if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
+ return value;
+ }
+ return undefined;
+ };
+
+    const parseArgs = (raw: unknown): Record<string, unknown> => {
+ if (typeof raw !== "string" || raw.trim().length === 0) return {};
+ try {
+ return JSON.parse(raw);
+ } catch {
+ return {};
+ }
+ };
+
+ const processToolOutput = (
+ text: string
+ ): {
+ annotated: string;
+ sources: { index: number; link: string }[];
+ } => ({ annotated: text, sources: [] });
+
+ let lastAssistantContent = "";
+ let streamedContent = false;
+    // Track whether we're inside a <think> block when the upstream streams
+    // provider-specific reasoning tokens (e.g. `reasoning` or `reasoning_content`).
+ let thinkOpen = false;
+
+ if (resolvedRoute && candidateModelId) {
+ yield {
+ type: MessageUpdateType.RouterMetadata,
+ route: resolvedRoute,
+ model: candidateModelId,
+ };
+ logger.debug(
+ { route: resolvedRoute, model: candidateModelId },
+ "[mcp] router metadata emitted"
+ );
+ }
+
+ for (let loop = 0; loop < 10; loop += 1) {
+ lastAssistantContent = "";
+ streamedContent = false;
+
+ const completionRequest: ChatCompletionCreateParamsStreaming = {
+ ...completionBase,
+ messages: messagesOpenAI,
+ };
+
+      const completionStream: Stream<ChatCompletionChunk> = await openai.chat.completions.create(
+ completionRequest,
+ {
+ signal: abortSignal,
+ headers: {
+ "ChatUI-Conversation-ID": conv._id.toString(),
+ "X-use-cache": "false",
+ },
+ }
+ );
+
+ // If provider header was exposed, notify UI so it can render "via {provider}".
+ if (providerHeader) {
+ yield {
+ type: MessageUpdateType.RouterMetadata,
+ route: "",
+ model: "",
+ provider: providerHeader as unknown as import("@huggingface/inference").InferenceProvider,
+ };
+ logger.debug({ provider: providerHeader }, "[mcp] provider metadata emitted");
+ }
+
+      const toolCallState: Record<number, { id?: string; name?: string; arguments: string }> = {};
+ let sawToolCall = false;
+ let tokenCount = 0;
+ for await (const chunk of completionStream) {
+ const choice = chunk.choices?.[0];
+ const delta = choice?.delta;
+ if (!delta) continue;
+
+ const chunkToolCalls = delta.tool_calls ?? [];
+ if (chunkToolCalls.length > 0) {
+ sawToolCall = true;
+ for (const call of chunkToolCalls) {
+ const toolCall = call as unknown as {
+ index?: number;
+ id?: string;
+ function?: { name?: string; arguments?: string };
+ };
+ const index = toolCall.index ?? 0;
+ const current = toolCallState[index] ?? { arguments: "" };
+ if (toolCall.id) current.id = toolCall.id;
+ if (toolCall.function?.name) current.name = toolCall.function.name;
+ if (toolCall.function?.arguments) current.arguments += toolCall.function.arguments;
+ toolCallState[index] = current;
+ }
+ }
+
+ const deltaContent = (() => {
+ if (typeof delta.content === "string") return delta.content;
+ const maybeParts = delta.content as unknown;
+ if (Array.isArray(maybeParts)) {
+ return maybeParts
+ .map((part) =>
+ typeof part === "object" &&
+ part !== null &&
+ "text" in part &&
+                typeof (part as Record<string, unknown>).text === "string"
+                  ? String((part as Record<string, unknown>).text)
+ : ""
+ )
+ .join("");
+ }
+ return "";
+ })();
+
+ // Provider-dependent reasoning fields (e.g., `reasoning` or `reasoning_content`).
+        const deltaReasoning: string =
+          typeof (delta as unknown as Record<string, unknown>)?.reasoning === "string"
+            ? ((delta as unknown as { reasoning?: string }).reasoning as string)
+            : typeof (delta as unknown as Record<string, unknown>)?.reasoning_content === "string"
+              ? ((delta as unknown as { reasoning_content?: string }).reasoning_content as string)
+              : "";
+
+        // Merge reasoning + content into a single combined token stream, mirroring
+        // the OpenAI adapter so the UI can auto-detect <think> blocks.
+ let combined = "";
+        if (deltaReasoning && deltaReasoning.length > 0) {
+          if (!thinkOpen) {
+            combined += "<think>" + deltaReasoning;
+            thinkOpen = true;
+          } else {
+            combined += deltaReasoning;
+          }
+        }
+
+        if (deltaContent && deltaContent.length > 0) {
+          if (thinkOpen) {
+            combined += "</think>" + deltaContent;
+            thinkOpen = false;
+          } else {
+            combined += deltaContent;
+          }
+        }
+
+ if (combined.length > 0) {
+ lastAssistantContent += combined;
+ if (!sawToolCall) {
+ streamedContent = true;
+ yield { type: MessageUpdateType.Stream, token: combined };
+ tokenCount += combined.length;
+ }
+ }
+ }
+ logger.debug(
+ { sawToolCalls: Object.keys(toolCallState).length > 0, tokens: tokenCount, loop },
+ "[mcp] completion stream closed"
+ );
+
+ if (Object.keys(toolCallState).length > 0) {
+ // If any streamed call is missing id, perform a quick non-stream retry to recover full tool_calls with ids
+ const missingId = Object.values(toolCallState).some((c) => c?.name && !c?.id);
+ let calls: NormalizedToolCall[];
+ if (missingId) {
+ const nonStream = await openai.chat.completions.create(
+ { ...completionBase, messages: messagesOpenAI, stream: false },
+ { signal: abortSignal }
+ );
+ const tc = nonStream.choices?.[0]?.message?.tool_calls ?? [];
+ calls = tc.map((t) => ({
+ id: t.id,
+ name: t.function?.name ?? "",
+ arguments: t.function?.arguments ?? "",
+ }));
+ } else {
+ calls = Object.values(toolCallState)
+ .map((c) => (c?.id && c?.name ? c : undefined))
+ .filter(Boolean)
+ .map((c) => ({
+ id: c?.id ?? "",
+ name: c?.name ?? "",
+ arguments: c?.arguments ?? "",
+ })) as NormalizedToolCall[];
+ }
+
+ // Include the assistant message with tool_calls so the next round
+ // sees both the calls and their outputs, matching MCP branch behavior.
+ const toolCalls: ChatCompletionMessageToolCall[] = calls.map((call) => ({
+ id: call.id,
+ type: "function",
+ function: { name: call.name, arguments: call.arguments },
+ }));
+
+ // Avoid sending content back to the model alongside tool_calls
+ // to prevent confusing follow-up reasoning. Strip any think blocks.
+ const assistantContentForToolMsg = lastAssistantContent.replace(
+          /<think>[\s\S]*?(?:<\/think>|$)/g,
+ ""
+ );
+ const assistantToolMessage: ChatCompletionMessageParam = {
+ role: "assistant",
+ content: assistantContentForToolMsg,
+ tool_calls: toolCalls,
+ };
+
+ const exec = executeToolCalls({
+ calls,
+ mapping,
+ servers,
+ parseArgs,
+ toPrimitive,
+ processToolOutput,
+ abortSignal,
+ });
+ let toolMsgCount = 0;
+ let toolRunCount = 0;
+ for await (const event of exec) {
+ if (event.type === "update") {
+ yield event.update;
+ } else {
+ messagesOpenAI = [
+ ...messagesOpenAI,
+ assistantToolMessage,
+ ...(event.summary.toolMessages ?? []),
+ ];
+ toolMsgCount = event.summary.toolMessages?.length ?? 0;
+ toolRunCount = event.summary.toolRuns?.length ?? 0;
+ logger.debug(
+ { toolMsgCount, toolRunCount },
+ "[mcp] tools executed; continuing loop for follow-up completion"
+ );
+ }
+ }
+ // Continue loop: next iteration will use tool messages to get the final content
+ continue;
+ }
+
+ // No tool calls: finalize and return
+      // If a <think> block is still open, close it for the final output
+      if (thinkOpen) {
+        lastAssistantContent += "</think>";
+        thinkOpen = false;
+      }
+ if (!streamedContent && lastAssistantContent.trim().length > 0) {
+ yield { type: MessageUpdateType.Stream, token: lastAssistantContent };
+ }
+ yield {
+ type: MessageUpdateType.FinalAnswer,
+ text: lastAssistantContent,
+ interrupted: false,
+ };
+ logger.debug({ length: lastAssistantContent.length, loop }, "[mcp] final answer emitted");
+ return true;
+ }
+ logger.warn("[mcp] exceeded tool-followup loops; falling back");
+ } catch (err) {
+ const msg = String(err ?? "");
+ const isAbort =
+ (abortSignal && abortSignal.aborted) ||
+ msg.includes("AbortError") ||
+ msg.includes("APIUserAbortError") ||
+ msg.includes("Request was aborted");
+ if (isAbort) {
+ // Expected on user stop; keep logs quiet and do not treat as error
+ logger.debug("[mcp] aborted by user");
+ return false;
+ }
+ logger.warn({ err: msg }, "[mcp] flow failed, falling back to default endpoint");
+ } finally {
+ // ensure MCP clients are closed after the turn
+ await drainPool();
+ }
+
+ return false;
+}
diff --git a/src/lib/server/textGeneration/mcp/toolInvocation.ts b/src/lib/server/textGeneration/mcp/toolInvocation.ts
new file mode 100644
index 0000000000000000000000000000000000000000..5fd1f7da7b58f093aa5a0e008f24876ee7e66615
--- /dev/null
+++ b/src/lib/server/textGeneration/mcp/toolInvocation.ts
@@ -0,0 +1,284 @@
+import { randomUUID } from "crypto";
+import { logger } from "../../logger";
+import type { MessageUpdate } from "$lib/types/MessageUpdate";
+import { MessageToolUpdateType, MessageUpdateType } from "$lib/types/MessageUpdate";
+import { ToolResultStatus } from "$lib/types/Tool";
+import type { ChatCompletionMessageParam } from "openai/resources/chat/completions";
+import type { McpToolMapping } from "$lib/server/mcp/tools";
+import type { McpServerConfig } from "$lib/server/mcp/httpClient";
+import { callMcpTool, type McpToolTextResponse } from "$lib/server/mcp/httpClient";
+import { getClient } from "$lib/server/mcp/clientPool";
+import type { Client } from "@modelcontextprotocol/sdk/client";
+
+export type Primitive = string | number | boolean;
+
+export type ToolRun = {
+ name: string;
+ parameters: Record<string, Primitive>;
+ output: string;
+};
+
+export interface NormalizedToolCall {
+ id: string;
+ name: string;
+ arguments: string;
+}
+
+export interface ExecuteToolCallsParams {
+ calls: NormalizedToolCall[];
+ mapping: Record<string, McpToolMapping>;
+ servers: McpServerConfig[];
+ parseArgs: (raw: unknown) => Record<string, unknown>;
+ toPrimitive: (value: unknown) => Primitive | undefined;
+ processToolOutput: (text: string) => {
+ annotated: string;
+ sources: { index: number; link: string }[];
+ };
+ abortSignal?: AbortSignal;
+ toolTimeoutMs?: number;
+}
+
+export interface ToolCallExecutionResult {
+ toolMessages: ChatCompletionMessageParam[];
+ toolRuns: ToolRun[];
+ finalAnswer?: { text: string; interrupted: boolean };
+}
+
+export type ToolExecutionEvent =
+ | { type: "update"; update: MessageUpdate }
+ | { type: "complete"; summary: ToolCallExecutionResult };
+
+const serverMap = (servers: McpServerConfig[]): Map<string, McpServerConfig> => {
+ const map = new Map<string, McpServerConfig>();
+ for (const server of servers) {
+ if (server?.name) {
+ map.set(server.name, server);
+ }
+ }
+ return map;
+};
+
+export async function* executeToolCalls({
+ calls,
+ mapping,
+ servers,
+ parseArgs,
+ toPrimitive,
+ processToolOutput,
+ abortSignal,
+ toolTimeoutMs = 30_000,
+}: ExecuteToolCallsParams): AsyncGenerator<ToolExecutionEvent> {
+ const toolMessages: ChatCompletionMessageParam[] = [];
+ const toolRuns: ToolRun[] = [];
+ const serverLookup = serverMap(servers);
+ // Pre-emit call + ETA updates and prepare tasks
+ type TaskResult = {
+ index: number;
+ output?: string;
+ structured?: unknown;
+ blocks?: unknown[];
+ error?: string;
+ uuid: string;
+ paramsClean: Record<string, Primitive>;
+ };
+
+ const prepared = calls.map((call) => {
+ const argsObj = parseArgs(call.arguments);
+ const paramsClean: Record<string, Primitive> = {};
+ for (const [k, v] of Object.entries(argsObj ?? {})) {
+ const prim = toPrimitive(v);
+ if (prim !== undefined) paramsClean[k] = prim;
+ }
+ return { call, argsObj, paramsClean, uuid: randomUUID() };
+ });
+
+ for (const p of prepared) {
+ yield {
+ type: "update",
+ update: {
+ type: MessageUpdateType.Tool,
+ subtype: MessageToolUpdateType.Call,
+ uuid: p.uuid,
+ call: { name: p.call.name, parameters: p.paramsClean },
+ },
+ };
+ yield {
+ type: "update",
+ update: {
+ type: MessageUpdateType.Tool,
+ subtype: MessageToolUpdateType.ETA,
+ uuid: p.uuid,
+ eta: 10,
+ },
+ };
+ }
+
+ // Preload clients per distinct server used in this batch
+ const distinctServerNames = Array.from(
+ new Set(prepared.map((p) => mapping[p.call.name]?.server).filter(Boolean) as string[])
+ );
+ const clientMap = new Map<string, Client>();
+ await Promise.all(
+ distinctServerNames.map(async (name) => {
+ const cfg = serverLookup.get(name);
+ if (!cfg) return;
+ try {
+ const client = await getClient(cfg, abortSignal);
+ clientMap.set(name, client);
+ } catch (e) {
+ logger.warn({ server: name, err: String(e) }, "[mcp] failed to connect client");
+ }
+ })
+ );
+
+ // Async queue to stream results in finish order
+ function createQueue<T>() {
+ const items: T[] = [];
+ const waiters: Array<(v: IteratorResult<T>) => void> = [];
+ let closed = false;
+ return {
+ push(item: T) {
+ const waiter = waiters.shift();
+ if (waiter) waiter({ value: item, done: false });
+ else items.push(item);
+ },
+ close() {
+ closed = true;
+ let waiter: ((v: IteratorResult<T>) => void) | undefined;
+ while ((waiter = waiters.shift())) {
+ waiter({ value: undefined as unknown as T, done: true });
+ }
+ },
+ async *iterator() {
+ for (;;) {
+ if (items.length) {
+ const first = items.shift();
+ if (first !== undefined) yield first as T;
+ continue;
+ }
+ if (closed) return;
+ const value: IteratorResult<T> = await new Promise((res) => waiters.push(res));
+ if (value.done) return;
+ yield value.value as T;
+ }
+ },
+ };
+ }
+
+ const q = createQueue<TaskResult>();
+
+ const tasks = prepared.map(async (p, index) => {
+ const mappingEntry = mapping[p.call.name];
+ if (!mappingEntry) {
+ q.push({
+ index,
+ error: `Unknown MCP function: ${p.call.name}`,
+ uuid: p.uuid,
+ paramsClean: p.paramsClean,
+ });
+ return;
+ }
+ const serverCfg = serverLookup.get(mappingEntry.server);
+ if (!serverCfg) {
+ q.push({
+ index,
+ error: `Unknown MCP server: ${mappingEntry.server}`,
+ uuid: p.uuid,
+ paramsClean: p.paramsClean,
+ });
+ return;
+ }
+ const client = clientMap.get(mappingEntry.server);
+ try {
+ logger.debug(
+ { server: mappingEntry.server, tool: mappingEntry.tool, parameters: p.paramsClean },
+ "[mcp] invoking tool"
+ );
+ const toolResponse: McpToolTextResponse = await callMcpTool(
+ serverCfg,
+ mappingEntry.tool,
+ p.argsObj,
+ {
+ client,
+ signal: abortSignal,
+ timeoutMs: toolTimeoutMs,
+ }
+ );
+ const { annotated } = processToolOutput(toolResponse.text ?? "");
+ logger.debug(
+ { server: mappingEntry.server, tool: mappingEntry.tool },
+ "[mcp] tool call completed"
+ );
+ q.push({
+ index,
+ output: annotated,
+ structured: toolResponse.structured,
+ blocks: toolResponse.content,
+ uuid: p.uuid,
+ paramsClean: p.paramsClean,
+ });
+ } catch (err) {
+ const message = err instanceof Error ? err.message : String(err);
+ logger.warn(
+ { server: mappingEntry.server, tool: mappingEntry.tool, err: message },
+ "[mcp] tool call failed"
+ );
+ q.push({ index, error: message, uuid: p.uuid, paramsClean: p.paramsClean });
+ }
+ });
+
+ // kick off and stream as they finish
+ Promise.allSettled(tasks).then(() => q.close());
+
+ const results: TaskResult[] = [];
+ for await (const r of q.iterator()) {
+ results.push(r);
+ if (r.error) {
+ yield {
+ type: "update",
+ update: {
+ type: MessageUpdateType.Tool,
+ subtype: MessageToolUpdateType.Error,
+ uuid: r.uuid,
+ message: r.error,
+ },
+ };
+ } else {
+ yield {
+ type: "update",
+ update: {
+ type: MessageUpdateType.Tool,
+ subtype: MessageToolUpdateType.Result,
+ uuid: r.uuid,
+ result: {
+ status: ToolResultStatus.Success,
+ call: { name: prepared[r.index].call.name, parameters: r.paramsClean },
+ outputs: [
+ {
+ text: r.output ?? "",
+ structured: r.structured,
+ content: r.blocks,
+ } as unknown as Record<string, unknown>,
+ ],
+ display: true,
+ },
+ },
+ };
+ }
+ }
+
+ // Collate outputs in original call order
+ results.sort((a, b) => a.index - b.index);
+ for (const r of results) {
+ const name = prepared[r.index].call.name;
+ const id = prepared[r.index].call.id;
+ if (!r.error) {
+ const output = r.output ?? "";
+ toolRuns.push({ name, parameters: r.paramsClean, output });
+ // For the LLM follow-up call, we keep only the textual output
+ toolMessages.push({ role: "tool", tool_call_id: id, content: output });
+ }
+ }
+
+ yield { type: "complete", summary: { toolMessages, toolRuns } };
+}
diff --git a/src/lib/server/textGeneration/reasoning.ts b/src/lib/server/textGeneration/reasoning.ts
new file mode 100644
index 0000000000000000000000000000000000000000..ecfb8d096f21b3cd7ba17f1c22b68a4b1e380940
--- /dev/null
+++ b/src/lib/server/textGeneration/reasoning.ts
@@ -0,0 +1,23 @@
+import { generateFromDefaultEndpoint } from "$lib/server/generateFromDefaultEndpoint";
+import { MessageUpdateType } from "$lib/types/MessageUpdate";
+
+export async function generateSummaryOfReasoning(
+ reasoning: string,
+ modelId: string | undefined,
+ locals: App.Locals | undefined
+): Promise<string> {
+ const prompt = `Summarize concisely the following reasoning for the user. Keep it short (one short paragraph).\n\n${reasoning}`;
+ const summary = await (async () => {
+ const it = generateFromDefaultEndpoint({
+ messages: [{ from: "user", content: prompt }],
+ modelId,
+ locals,
+ });
+ let out = "";
+ for await (const update of it) {
+ if (update.type === MessageUpdateType.Stream) out += update.token;
+ }
+ return out;
+ })();
+ return summary.trim();
+}
diff --git a/src/lib/server/textGeneration/types.ts b/src/lib/server/textGeneration/types.ts
index 791251510c7ef15598d34bab35d0274d779a4e47..9ea9bb2e31f315058ae1b7274d1cab25709831f9 100644
--- a/src/lib/server/textGeneration/types.ts
+++ b/src/lib/server/textGeneration/types.ts
@@ -15,6 +15,8 @@ export interface TextGenerationContext {
username?: string;
/** Force-enable multimodal handling for endpoints that support it */
forceMultimodal?: boolean;
+ /** Force-enable tool calling even if model does not advertise support */
+ forceTools?: boolean;
locals: App.Locals | undefined;
abortController: AbortController;
}
diff --git a/src/lib/server/textGeneration/utils/routing.ts b/src/lib/server/textGeneration/utils/routing.ts
new file mode 100644
index 0000000000000000000000000000000000000000..1f6c5ea4a49f47c182141375a952e28792b47a61
--- /dev/null
+++ b/src/lib/server/textGeneration/utils/routing.ts
@@ -0,0 +1,21 @@
+import type { EndpointMessage } from "../../endpoints/endpoints";
+
+const ROUTER_REASONING_REGEX = /<think>[\s\S]*?(?:<\/think>|$)/g;
+
+export function stripReasoningBlocks(text: string): string {
+ const stripped = text.replace(ROUTER_REASONING_REGEX, "");
+ return stripped === text ? text : stripped.trim();
+}
+
+export function stripReasoningFromMessageForRouting(message: EndpointMessage): EndpointMessage {
+ const clone = { ...message } as EndpointMessage & { reasoning?: string };
+ if ("reasoning" in clone) {
+ delete clone.reasoning;
+ }
+ const content =
+ typeof message.content === "string" ? stripReasoningBlocks(message.content) : message.content;
+ return {
+ ...clone,
+ content,
+ };
+}
diff --git a/src/lib/server/textGeneration/utils/toolPrompt.ts b/src/lib/server/textGeneration/utils/toolPrompt.ts
new file mode 100644
index 0000000000000000000000000000000000000000..332db6269d19956b0017d3368623f921df467988
--- /dev/null
+++ b/src/lib/server/textGeneration/utils/toolPrompt.ts
@@ -0,0 +1,15 @@
+import type { OpenAiTool } from "$lib/server/mcp/tools";
+
+export function buildToolPreprompt(tools: OpenAiTool[]): string {
+ if (!Array.isArray(tools) || tools.length === 0) return "";
+ const names = tools
+ .map((t) => (t?.function?.name ? String(t.function.name) : ""))
+ .filter((s) => s.length > 0);
+ if (names.length === 0) return "";
+ const currentDate = new Date().toLocaleDateString("en-US", {
+ year: "numeric",
+ month: "long",
+ day: "numeric",
+ });
+ return `You can use the following tools if helpful: ${names.join(", ")}. Today's date: ${currentDate}. If a tool generates an image, you can inline it directly: .`;
+}
diff --git a/src/lib/server/urlSafety.ts b/src/lib/server/urlSafety.ts
new file mode 100644
index 0000000000000000000000000000000000000000..5bedb9c943c0d2d3ec008c8ed20fc9047c32fa95
--- /dev/null
+++ b/src/lib/server/urlSafety.ts
@@ -0,0 +1,25 @@
+// Shared server-side URL safety helper (exact behavior preserved)
+export function isValidUrl(urlString: string): boolean {
+ try {
+ const url = new URL(urlString);
+ // Only allow HTTPS protocol
+ if (url.protocol !== "https:") {
+ return false;
+ }
+ // Prevent localhost/private IPs (basic check)
+ const hostname = url.hostname.toLowerCase();
+ if (
+ hostname === "localhost" ||
+ hostname.startsWith("127.") ||
+ hostname.startsWith("192.168.") ||
+ hostname.startsWith("172.16.") ||
+ hostname === "[::1]" ||
+ hostname === "0.0.0.0"
+ ) {
+ return false;
+ }
+ return true;
+ } catch {
+ return false;
+ }
+}
diff --git a/src/lib/stores/mcpServers.ts b/src/lib/stores/mcpServers.ts
new file mode 100644
index 0000000000000000000000000000000000000000..44277e640d0447b6427504eb5e82625ecaf73b6d
--- /dev/null
+++ b/src/lib/stores/mcpServers.ts
@@ -0,0 +1,247 @@
+/**
+ * MCP Servers Store
+ * Manages base (env-configured) and custom (user-added) MCP servers
+ * Stores custom servers and selection state in browser localStorage
+ */
+
+import { writable, derived } from "svelte/store";
+import { base } from "$app/paths";
+import { browser } from "$app/environment";
+import type { MCPServer, ServerStatus, MCPTool } from "$lib/types/Tool";
+
+const STORAGE_KEYS = {
+ CUSTOM_SERVERS: "mcp:custom-servers",
+ SELECTED_IDS: "mcp:selected-ids",
+} as const;
+
+// Load custom servers from localStorage
+function loadCustomServers(): MCPServer[] {
+ if (!browser) return [];
+
+ try {
+ const json = localStorage.getItem(STORAGE_KEYS.CUSTOM_SERVERS);
+ return json ? JSON.parse(json) : [];
+ } catch (error) {
+ console.error("Failed to load custom MCP servers from localStorage:", error);
+ return [];
+ }
+}
+
+// Load selected server IDs from localStorage
+function loadSelectedIds(): Set<string> {
+ if (!browser) return new Set();
+
+ try {
+ const json = localStorage.getItem(STORAGE_KEYS.SELECTED_IDS);
+ const ids: string[] = json ? JSON.parse(json) : [];
+ return new Set(ids);
+ } catch (error) {
+ console.error("Failed to load selected MCP server IDs from localStorage:", error);
+ return new Set();
+ }
+}
+
+// Save custom servers to localStorage
+function saveCustomServers(servers: MCPServer[]) {
+ if (!browser) return;
+
+ try {
+ localStorage.setItem(STORAGE_KEYS.CUSTOM_SERVERS, JSON.stringify(servers));
+ } catch (error) {
+ console.error("Failed to save custom MCP servers to localStorage:", error);
+ }
+}
+
+// Save selected IDs to localStorage
+function saveSelectedIds(ids: Set<string>) {
+ if (!browser) return;
+
+ try {
+ localStorage.setItem(STORAGE_KEYS.SELECTED_IDS, JSON.stringify([...ids]));
+ } catch (error) {
+ console.error("Failed to save selected MCP server IDs to localStorage:", error);
+ }
+}
+
+// Store for all servers (base + custom)
+export const allMcpServers = writable<MCPServer[]>([]);
+
+// Store for selected server IDs
+export const selectedServerIds = writable<Set<string>>(loadSelectedIds());
+
+// Auto-persist selected IDs when they change
+if (browser) {
+ selectedServerIds.subscribe((ids) => {
+ saveSelectedIds(ids);
+ });
+}
+
+// Derived store: only enabled servers
+export const enabledServers = derived([allMcpServers, selectedServerIds], ([$all, $selected]) =>
+ $all.filter((s) => $selected.has(s.id))
+);
+
+// Derived store: count of enabled servers
+export const enabledServersCount = derived(enabledServers, ($enabled) => $enabled.length);
+
+// Note: Authorization overlay (with user's HF token) for the Hugging Face MCP host
+// is applied server-side when enabled via MCP_FORWARD_HF_USER_TOKEN.
+
+/**
+ * Refresh base servers from API and merge with custom servers
+ */
+export async function refreshMcpServers() {
+ try {
+ const response = await fetch(`${base}/api/mcp/servers`);
+ if (!response.ok) {
+ throw new Error(`Failed to fetch base servers: ${response.statusText}`);
+ }
+
+ const baseServers: MCPServer[] = await response.json();
+ const customServers = loadCustomServers();
+
+ // Merge base and custom servers
+ const merged = [...baseServers, ...customServers];
+ allMcpServers.set(merged);
+
+ // Prune selected IDs that no longer correspond to existing servers
+ const validIds = new Set(merged.map((s) => s.id));
+ selectedServerIds.update(($ids) => {
+ const filtered = new Set([...$ids].filter((id) => validIds.has(id)));
+ return filtered;
+ });
+ } catch (error) {
+ console.error("Failed to refresh MCP servers:", error);
+ // On error, just use custom servers
+ allMcpServers.set(loadCustomServers());
+ }
+}
+
+/**
+ * Toggle a server on/off
+ */
+export function toggleServer(id: string) {
+ selectedServerIds.update(($ids) => {
+ const newSet = new Set($ids);
+ if (newSet.has(id)) {
+ newSet.delete(id);
+ } else {
+ newSet.add(id);
+ }
+ return newSet;
+ });
+}
+
+/**
+ * Add a custom MCP server
+ */
+export function addCustomServer(server: Omit<MCPServer, "id" | "type" | "status">): string {
+ const newServer: MCPServer = {
+ ...server,
+ id: crypto.randomUUID(),
+ type: "custom",
+ status: "disconnected",
+ };
+
+ const customServers = loadCustomServers();
+ customServers.push(newServer);
+ saveCustomServers(customServers);
+
+ // Refresh all servers to include the new one
+ refreshMcpServers();
+
+ return newServer.id;
+}
+
+/**
+ * Update an existing custom server
+ */
+export function updateCustomServer(id: string, updates: Partial<MCPServer>) {
+ const customServers = loadCustomServers();
+ const index = customServers.findIndex((s) => s.id === id);
+
+ if (index !== -1) {
+ customServers[index] = { ...customServers[index], ...updates };
+ saveCustomServers(customServers);
+ refreshMcpServers();
+ }
+}
+
+/**
+ * Delete a custom server
+ */
+export function deleteCustomServer(id: string) {
+ const customServers = loadCustomServers();
+ const filtered = customServers.filter((s) => s.id !== id);
+ saveCustomServers(filtered);
+
+ // Also remove from selected IDs
+ selectedServerIds.update(($ids) => {
+ const newSet = new Set($ids);
+ newSet.delete(id);
+ return newSet;
+ });
+
+ refreshMcpServers();
+}
+
+/**
+ * Update server status (from health check)
+ */
+export function updateServerStatus(
+ id: string,
+ status: ServerStatus,
+ errorMessage?: string,
+ tools?: MCPTool[],
+ authRequired?: boolean
+) {
+ allMcpServers.update(($servers) =>
+ $servers.map((s) =>
+ s.id === id
+ ? {
+ ...s,
+ status,
+ errorMessage,
+ tools,
+ authRequired,
+ }
+ : s
+ )
+ );
+}
+
+/**
+ * Run health check on a server
+ */
+export async function healthCheckServer(
+ server: MCPServer
+): Promise<{ ready: boolean; tools?: MCPTool[]; error?: string }> {
+ try {
+ updateServerStatus(server.id, "connecting");
+
+ const response = await fetch(`${base}/api/mcp/health`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ url: server.url, headers: server.headers }),
+ });
+
+ const result = await response.json();
+
+ if (result.ready && result.tools) {
+ updateServerStatus(server.id, "connected", undefined, result.tools, false);
+ return { ready: true, tools: result.tools };
+ } else {
+ updateServerStatus(server.id, "error", result.error, undefined, Boolean(result.authRequired));
+ return { ready: false, error: result.error };
+ }
+ } catch (error) {
+ const errorMessage = error instanceof Error ? error.message : "Unknown error";
+ updateServerStatus(server.id, "error", errorMessage);
+ return { ready: false, error: errorMessage };
+ }
+}
+
+// Initialize on module load
+if (browser) {
+ refreshMcpServers();
+}
diff --git a/src/lib/stores/settings.ts b/src/lib/stores/settings.ts
index 450b2ad4b0ddcd6f2a3bdbce1f8ab9f41c718071..2f7e0f5f5ba2ea5c828accaf6a922581e71d41b7 100644
--- a/src/lib/stores/settings.ts
+++ b/src/lib/stores/settings.ts
@@ -12,6 +12,7 @@ type SettingsStore = {
activeModel: string;
customPrompts: Record;
multimodalOverrides: Record;
+ toolsOverrides: Record<string, boolean>;
recentlySaved: boolean;
disableStream: boolean;
directPaste: boolean;
diff --git a/src/lib/types/Message.ts b/src/lib/types/Message.ts
index 40eb3cd0ee2ae1c5a6ea8af1d8ffc0f4579d5a47..81bf052389abd52fc6a656783ac108c644acbfff 100644
--- a/src/lib/types/Message.ts
+++ b/src/lib/types/Message.ts
@@ -9,6 +9,8 @@ export type Message = Partial & {
content: string;
updates?: MessageUpdate[];
+ // Optional server or client-side reasoning content (<think> blocks)
+ reasoning?: string;
score?: -1 | 0 | 1;
/**
* Either contains the base64 encoded image data
diff --git a/src/lib/types/MessageUpdate.ts b/src/lib/types/MessageUpdate.ts
index 6400de02b56d9088c15cfac0c51c0f38710425f6..28d69214e3ab19d40212b8fb37f59a81fff260e2 100644
--- a/src/lib/types/MessageUpdate.ts
+++ b/src/lib/types/MessageUpdate.ts
@@ -1,8 +1,10 @@
import type { InferenceProvider } from "@huggingface/inference";
+import type { ToolCall, ToolResult } from "$lib/types/Tool";
export type MessageUpdate =
| MessageStatusUpdate
| MessageTitleUpdate
+ | MessageToolUpdate
| MessageStreamUpdate
| MessageFileUpdate
| MessageFinalAnswerUpdate
@@ -12,6 +14,7 @@ export type MessageUpdate =
export enum MessageUpdateType {
Status = "status",
Title = "title",
+ Tool = "tool",
Stream = "stream",
File = "file",
FinalAnswer = "finalAnswer",
@@ -43,6 +46,43 @@ export interface MessageStreamUpdate {
token: string;
}
+// Tool updates (for MCP and function calling)
+export enum MessageToolUpdateType {
+ Call = "call",
+ Result = "result",
+ Error = "error",
+ ETA = "eta",
+}
+
+interface MessageToolUpdateBase<TSubtype extends MessageToolUpdateType> {
+ type: MessageUpdateType.Tool;
+ subtype: TSubtype;
+ uuid: string;
+}
+
+export interface MessageToolCallUpdate extends MessageToolUpdateBase<MessageToolUpdateType.Call> {
+ call: ToolCall;
+}
+
+export interface MessageToolResultUpdate
+ extends MessageToolUpdateBase<MessageToolUpdateType.Result> {
+ result: ToolResult;
+}
+
+export interface MessageToolErrorUpdate extends MessageToolUpdateBase<MessageToolUpdateType.Error> {
+ message: string;
+}
+
+export interface MessageToolEtaUpdate extends MessageToolUpdateBase<MessageToolUpdateType.ETA> {
+ eta: number;
+}
+
+export type MessageToolUpdate =
+ | MessageToolCallUpdate
+ | MessageToolResultUpdate
+ | MessageToolErrorUpdate
+ | MessageToolEtaUpdate;
+
export enum MessageReasoningUpdateType {
Stream = "stream",
Status = "status",
diff --git a/src/lib/types/Settings.ts b/src/lib/types/Settings.ts
index d988ca8c8d836074b12f6be6a979e2f8c26225d5..15500c4e4156d37cffd73589bb972dc0f834eb30 100644
--- a/src/lib/types/Settings.ts
+++ b/src/lib/types/Settings.ts
@@ -21,6 +21,12 @@ export interface Settings extends Timestamps {
*/
multimodalOverrides?: Record;
+ /**
+ * Per‑model overrides to enable tool calling (OpenAI tools/function calling)
+ * even when not advertised by the provider list. Only `true` is meaningful.
+ */
+ toolsOverrides?: Record<string, boolean>;
+
/**
* Per-model toggle to hide Omni prompt suggestions shown near the composer.
* When set to `true`, prompt examples for that model are suppressed.
@@ -38,6 +44,7 @@ export const DEFAULT_SETTINGS = {
activeModel: defaultModel.id,
customPrompts: {},
multimodalOverrides: {},
+ toolsOverrides: {},
hidePromptExamples: {},
disableStream: false,
directPaste: false,
diff --git a/src/lib/types/Tool.ts b/src/lib/types/Tool.ts
new file mode 100644
index 0000000000000000000000000000000000000000..e2172e17ce0d6ba34a9c647ec205715c9e68ab38
--- /dev/null
+++ b/src/lib/types/Tool.ts
@@ -0,0 +1,74 @@
+export enum ToolResultStatus {
+ Success = "success",
+ Error = "error",
+}
+
+export interface ToolCall {
+ name: string;
+ parameters: Record<string, string | number | boolean>;
+ toolId?: string;
+}
+
+export interface ToolResultSuccess {
+ status: ToolResultStatus.Success;
+ call: ToolCall;
+ outputs: Record<string, unknown>[];
+ display?: boolean;
+}
+
+export interface ToolResultError {
+ status: ToolResultStatus.Error;
+ call: ToolCall;
+ message: string;
+ display?: boolean;
+}
+
+export type ToolResult = ToolResultSuccess | ToolResultError;
+
+export interface ToolFront {
+ _id: string;
+ name: string;
+ displayName?: string;
+ description?: string;
+ color?: string;
+ icon?: string;
+ type?: "config" | "community";
+ isOnByDefault?: boolean;
+ isLocked?: boolean;
+ mimeTypes?: string[];
+ timeToUseMS?: number;
+}
+
+// MCP Server types
+export interface KeyValuePair {
+ key: string;
+ value: string;
+}
+
+export type ServerStatus = "connected" | "connecting" | "disconnected" | "error";
+
+export interface MCPTool {
+ name: string;
+ description?: string;
+ inputSchema?: unknown;
+}
+
+export interface MCPServer {
+ id: string;
+ name: string;
+ url: string;
+ type: "base" | "custom";
+ headers?: KeyValuePair[];
+ env?: KeyValuePair[];
+ status?: ServerStatus;
+ isLocked?: boolean;
+ tools?: MCPTool[];
+ errorMessage?: string;
+ // Indicates server reports or appears to require OAuth or other auth
+ authRequired?: boolean;
+}
+
+export interface MCPServerApi {
+ url: string;
+ headers?: KeyValuePair[];
+}
diff --git a/src/lib/utils/favicon.ts b/src/lib/utils/favicon.ts
new file mode 100644
index 0000000000000000000000000000000000000000..d7de81df30ac4b747adcfb6287bafdbe5e342245
--- /dev/null
+++ b/src/lib/utils/favicon.ts
@@ -0,0 +1,21 @@
+/**
+ * Generates a Google favicon URL for the given server URL
+ * @param serverUrl - The MCP server URL (e.g., "https://mcp.exa.ai/mcp")
+ * @param size - The size of the favicon in pixels (default: 64)
+ * @returns The Google favicon service URL
+ */
+export function getMcpServerFaviconUrl(serverUrl: string, size: number = 64): string {
+ try {
+ const parsed = new URL(serverUrl);
+ // Extract root domain (e.g., "exa.ai" from "mcp.exa.ai")
+ // Google's favicon service needs the root domain, not subdomains
+ const hostnameParts = parsed.hostname.split(".");
+ const rootDomain =
+ hostnameParts.length >= 2 ? hostnameParts.slice(-2).join(".") : parsed.hostname;
+ const domain = `${parsed.protocol}//${rootDomain}`;
+ return `https://www.google.com/s2/favicons?sz=${size}&domain_url=${encodeURIComponent(domain)}`;
+ } catch {
+ // If URL parsing fails, just use the raw serverUrl - Google will handle it
+ return `https://www.google.com/s2/favicons?sz=${size}&domain_url=${encodeURIComponent(serverUrl)}`;
+ }
+}
diff --git a/src/lib/utils/hf.ts b/src/lib/utils/hf.ts
new file mode 100644
index 0000000000000000000000000000000000000000..d80bb32810e84d50832cfbae91ed9a976de11064
--- /dev/null
+++ b/src/lib/utils/hf.ts
@@ -0,0 +1,15 @@
+// Client-safe HF utilities used in UI components
+
+export function isStrictHfMcpLogin(urlString: string): boolean {
+ try {
+ const u = new URL(urlString);
+ return (
+ u.protocol === "https:" &&
+ u.hostname === "huggingface.co" &&
+ u.pathname === "/mcp" &&
+ u.search === "?login"
+ );
+ } catch {
+ return false;
+ }
+}
diff --git a/src/lib/utils/mcpValidation.ts b/src/lib/utils/mcpValidation.ts
new file mode 100644
index 0000000000000000000000000000000000000000..98a5fd474878c20be62df8476044ec056c904124
--- /dev/null
+++ b/src/lib/utils/mcpValidation.ts
@@ -0,0 +1,147 @@
+/**
+ * URL validation and sanitization utilities for MCP integration
+ */
+
+import { browser } from "$app/environment";
+import { dev } from "$app/environment";
+
+/**
+ * Sanitize and validate a URL for MCP server connections
+ * @param urlString - The URL string to validate
+ * @returns Sanitized URL string or null if invalid
+ */
+export function validateMcpServerUrl(urlString: string): string | null {
+ if (!urlString || typeof urlString !== "string") {
+ return null;
+ }
+
+ try {
+ const url = new URL(urlString.trim());
+
+ // Allow http/https only
+ if (!["http:", "https:"].includes(url.protocol)) {
+ return null;
+ }
+
+ // Warn about non-HTTPS in production
+ if (!dev && url.protocol === "http:" && browser) {
+ console.warn(
+ "Warning: Connecting to non-HTTPS MCP server in production. This may expose sensitive data."
+ );
+ }
+
+ // Block certain localhost/private IPs in production
+ if (!dev && isPrivateOrLocalhost(url.hostname)) {
+ console.warn("Warning: Localhost/private IP addresses are not recommended in production.");
+ }
+
+ return url.toString();
+ } catch (error) {
+ // Invalid URL
+ return null;
+ }
+}
+
+/**
+ * Check if hostname is localhost or a private IP
+ */
+function isPrivateOrLocalhost(hostname: string): boolean {
+ // Localhost checks
+ if (
+ hostname === "localhost" ||
+ hostname === "127.0.0.1" ||
+ hostname === "::1" ||
+ hostname.endsWith(".localhost")
+ ) {
+ return true;
+ }
+
+ // Private IP ranges (IPv4)
+ const ipv4Regex = /^(10\.|172\.(1[6-9]|2[0-9]|3[01])\.|192\.168\.|127\.|0\.0\.0\.0|169\.254\.)/;
+ if (ipv4Regex.test(hostname)) {
+ return true;
+ }
+
+ return false;
+}
+
+/**
+ * Sanitize URL by removing sensitive parts
+ * Used for logging and display purposes
+ */
+export function sanitizeUrlForDisplay(urlString: string): string {
+ try {
+ const url = new URL(urlString);
+ // Remove username/password if present
+ url.username = "";
+ url.password = "";
+ return url.toString();
+ } catch {
+ return urlString;
+ }
+}
+
+/**
+ * Check if URL is safe to connect to
+ * Returns an error message if unsafe, null if safe
+ */
+export function checkUrlSafety(urlString: string): string | null {
+ const validated = validateMcpServerUrl(urlString);
+ if (!validated) {
+ return "Invalid URL. Please use http:// or https:// URLs only.";
+ }
+
+ try {
+ const url = new URL(validated);
+
+ // Additional safety checks
+ if (!dev && url.protocol === "http:") {
+ return "Non-HTTPS URLs are not recommended in production. Please use https:// for security.";
+ }
+
+ return null; // Safe
+ } catch {
+ return "Invalid URL format.";
+ }
+}
+
+/**
+ * Check if a header key is likely to contain sensitive data
+ */
+export function isSensitiveHeader(key: string): boolean {
+ const sensitiveKeys = [
+ "authorization",
+ "api-key",
+ "api_key",
+ "apikey",
+ "token",
+ "secret",
+ "password",
+ "bearer",
+ "x-api-key",
+ "x-auth-token",
+ ];
+
+ const lowerKey = key.toLowerCase();
+ return sensitiveKeys.some((sensitive) => lowerKey.includes(sensitive));
+}
+
+/**
+ * Validate header key-value pair
+ * Returns error message if invalid, null if valid
+ */
+export function validateHeader(key: string, value: string): string | null {
+ if (!key || !key.trim()) {
+ return "Header name is required";
+ }
+
+ if (!/^[a-zA-Z0-9_-]+$/.test(key)) {
+ return "Header name can only contain letters, numbers, hyphens, and underscores";
+ }
+
+ if (!value) {
+ return "Header value is required";
+ }
+
+ return null;
+}
diff --git a/src/lib/utils/messageUpdates.ts b/src/lib/utils/messageUpdates.ts
index c72fc0e514a63efebf0501a2033135c66cfc3df8..338502f4ef59a2f8212d6f8e518c0ffb4ee046b3 100644
--- a/src/lib/utils/messageUpdates.ts
+++ b/src/lib/utils/messageUpdates.ts
@@ -2,17 +2,28 @@ import type { MessageFile } from "$lib/types/Message";
import {
type MessageUpdate,
type MessageStreamUpdate,
+ type MessageToolUpdate,
+ type MessageToolCallUpdate,
+ type MessageToolResultUpdate,
+ type MessageToolErrorUpdate,
MessageUpdateType,
+ MessageToolUpdateType,
} from "$lib/types/MessageUpdate";
import { page } from "$app/state";
+import type { KeyValuePair } from "$lib/types/Tool";
type MessageUpdateRequestOptions = {
base: string;
inputs?: string;
messageId?: string;
isRetry: boolean;
+ isContinue?: boolean;
files?: MessageFile[];
+ // Optional: pass selected MCP server names (client-side selection)
+ selectedMcpServerNames?: string[];
+ // Optional: pass selected MCP server configs (for custom client-defined servers)
+ selectedMcpServers?: Array<{ name: string; url: string; headers?: KeyValuePair[] }>;
};
export async function fetchMessageUpdates(
conversationId: string,
@@ -28,6 +39,10 @@ export async function fetchMessageUpdates(
inputs: opts.inputs,
id: opts.messageId,
is_retry: opts.isRetry,
+ is_continue: Boolean(opts.isContinue),
+ // Will be ignored server-side if unsupported
+ selectedMcpServerNames: opts.selectedMcpServerNames,
+ selectedMcpServers: opts.selectedMcpServers,
});
opts.files?.forEach((file) => {
@@ -225,6 +240,21 @@ async function* smoothAsyncIterator(iterator: AsyncGenerator): AsyncGenera
}
}
+// Tool update type guards for UI rendering
+export const isMessageToolUpdate = (update: MessageUpdate): update is MessageToolUpdate =>
+ update.type === MessageUpdateType.Tool;
+
+export const isMessageToolCallUpdate = (update: MessageUpdate): update is MessageToolCallUpdate =>
+ isMessageToolUpdate(update) && update.subtype === MessageToolUpdateType.Call;
+
+export const isMessageToolResultUpdate = (
+ update: MessageUpdate
+): update is MessageToolResultUpdate =>
+ isMessageToolUpdate(update) && update.subtype === MessageToolUpdateType.Result;
+
+export const isMessageToolErrorUpdate = (update: MessageUpdate): update is MessageToolErrorUpdate =>
+ isMessageToolUpdate(update) && update.subtype === MessageToolUpdateType.Error;
+
const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
const waitForEvent = (eventTarget: EventTarget, eventName: string) =>
new Promise((resolve) =>
diff --git a/src/routes/api/fetch-url/+server.ts b/src/routes/api/fetch-url/+server.ts
index bb9859d5d5f1efd96e169f19f8dc8dfd2d087c9f..90752a0a1e501a6d5d4e92d3fcab459ccb4f2dc3 100644
--- a/src/routes/api/fetch-url/+server.ts
+++ b/src/routes/api/fetch-url/+server.ts
@@ -1,36 +1,11 @@
import { error } from "@sveltejs/kit";
import { logger } from "$lib/server/logger.js";
import { fetch } from "undici";
+import { isValidUrl } from "$lib/server/urlSafety";
const MAX_FILE_SIZE = 10 * 1024 * 1024; // 10MB
const FETCH_TIMEOUT = 30000; // 30 seconds
-// Validate URL safety - HTTPS only
-function isValidUrl(urlString: string): boolean {
- try {
- const url = new URL(urlString);
- // Only allow HTTPS protocol
- if (url.protocol !== "https:") {
- return false;
- }
- // Prevent localhost/private IPs (basic check)
- const hostname = url.hostname.toLowerCase();
- if (
- hostname === "localhost" ||
- hostname.startsWith("127.") ||
- hostname.startsWith("192.168.") ||
- hostname.startsWith("172.16.") ||
- hostname === "[::1]" ||
- hostname === "0.0.0.0"
- ) {
- return false;
- }
- return true;
- } catch {
- return false;
- }
-}
-
export async function GET({ url }) {
const targetUrl = url.searchParams.get("url");
diff --git a/src/routes/api/mcp/health/+server.ts b/src/routes/api/mcp/health/+server.ts
new file mode 100644
index 0000000000000000000000000000000000000000..12d542388bb84b190a90c90193a588f4fb886b15
--- /dev/null
+++ b/src/routes/api/mcp/health/+server.ts
@@ -0,0 +1,265 @@
+import { Client } from "@modelcontextprotocol/sdk/client/index.js";
+import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
+import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js";
+import type { KeyValuePair } from "$lib/types/Tool";
+import { config } from "$lib/server/config";
+import type { RequestHandler } from "./$types";
+import { isValidUrl } from "$lib/server/urlSafety";
+import { isStrictHfMcpLogin, hasNonEmptyToken } from "$lib/server/mcp/hf";
+
+interface HealthCheckRequest {
+ url: string;
+ headers?: KeyValuePair[];
+}
+
+interface HealthCheckResponse {
+ ready: boolean;
+ tools?: Array<{
+ name: string;
+ description?: string;
+ inputSchema?: unknown;
+ }>;
+ error?: string;
+ authRequired?: boolean;
+}
+
+export const POST: RequestHandler = async ({ request, locals }) => {
+ let client: Client | undefined;
+
+ try {
+ const body: HealthCheckRequest = await request.json();
+ const { url, headers } = body;
+
+ if (!url) {
+ return new Response(JSON.stringify({ ready: false, error: "URL is required" }), {
+ status: 400,
+ headers: { "Content-Type": "application/json" },
+ });
+ }
+
+ // Validate URL safety (HTTPS-only, no localhost/private hosts) before connecting
+
+ if (!isValidUrl(url)) {
+ return new Response(
+ JSON.stringify({
+ ready: false,
+ error: "Invalid or unsafe URL (only HTTPS is supported)",
+ } as HealthCheckResponse),
+ { status: 400, headers: { "Content-Type": "application/json" } }
+ );
+ }
+
+ const baseUrl = new URL(url);
+
+ // Minimal header handling
+ const headersRecord: Record<string, string> = headers?.length
+ ? Object.fromEntries(headers.map((h) => [h.key, h.value]))
+ : {};
+ if (!headersRecord["Accept"]) {
+ headersRecord["Accept"] = "application/json, text/event-stream";
+ }
+
+ // If enabled, attach the logged-in user's HF token only for the official HF MCP endpoint
+ try {
+ const shouldForward = config.MCP_FORWARD_HF_USER_TOKEN === "true";
+ const userToken =
+ (locals as unknown as { hfAccessToken?: string } | undefined)?.hfAccessToken ??
+ (locals as unknown as { token?: string } | undefined)?.token;
+ const hasAuth = typeof headersRecord["Authorization"] === "string";
+ const isHfMcpTarget = isStrictHfMcpLogin(url);
+ if (shouldForward && !hasAuth && isHfMcpTarget && hasNonEmptyToken(userToken)) {
+ headersRecord["Authorization"] = `Bearer ${userToken}`;
+ }
+ } catch {
+ // best-effort overlay
+ }
+
+ // Add an abort timeout to outbound requests (align with fetch-url: 30s)
+ const controller = new AbortController();
+ const timeoutId = setTimeout(() => controller.abort(), 30000);
+ const signal = controller.signal;
+ const requestInit: RequestInit = {
+ headers: headersRecord,
+ signal,
+ };
+
+ let lastError: Error | undefined;
+
+ // Try Streamable HTTP transport first
+ try {
+ console.log(`[MCP Health] Trying HTTP transport for ${url}`);
+ client = new Client({
+ name: "chat-ui-health-check",
+ version: "1.0.0",
+ });
+
+ const transport = new StreamableHTTPClientTransport(baseUrl, { requestInit });
+ console.log(`[MCP Health] Connecting to ${url}...`);
+ await client.connect(transport);
+ console.log(`[MCP Health] Connected successfully via HTTP`);
+
+ // Connection successful, get tools
+ const toolsResponse = await client.listTools();
+
+ // Disconnect after getting tools
+ await client.close();
+
+ if (toolsResponse && toolsResponse.tools) {
+ const response: HealthCheckResponse = {
+ ready: true,
+ tools: toolsResponse.tools.map((tool) => ({
+ name: tool.name,
+ description: tool.description,
+ inputSchema: tool.inputSchema,
+ })),
+ authRequired: false,
+ };
+
+ const res = new Response(JSON.stringify(response), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+ clearTimeout(timeoutId);
+ return res;
+ } else {
+ const res = new Response(
+ JSON.stringify({
+ ready: false,
+ error: "Connected but no tools available",
+ authRequired: false,
+ } as HealthCheckResponse),
+ {
+ status: 503,
+ headers: { "Content-Type": "application/json" },
+ }
+ );
+ clearTimeout(timeoutId);
+ return res;
+ }
+ } catch (error) {
+ lastError = error instanceof Error ? error : new Error(String(error));
+ console.log("Streamable HTTP failed, trying SSE transport...", lastError.message);
+
+ // Close failed client
+ try {
+ await client?.close();
+ } catch {
+ // Ignore
+ }
+
+ // Try SSE transport
+ try {
+ console.log(`[MCP Health] Trying SSE transport for ${url}`);
+ client = new Client({
+ name: "chat-ui-health-check",
+ version: "1.0.0",
+ });
+
+ const sseTransport = new SSEClientTransport(baseUrl, { requestInit });
+ console.log(`[MCP Health] Connecting via SSE...`);
+ await client.connect(sseTransport);
+ console.log(`[MCP Health] Connected successfully via SSE`);
+
+ // Connection successful, get tools
+ const toolsResponse = await client.listTools();
+
+ // Disconnect after getting tools
+ await client.close();
+
+ if (toolsResponse && toolsResponse.tools) {
+ const response: HealthCheckResponse = {
+ ready: true,
+ tools: toolsResponse.tools.map((tool) => ({
+ name: tool.name,
+ description: tool.description,
+ inputSchema: tool.inputSchema,
+ })),
+ authRequired: false,
+ };
+
+ const res = new Response(JSON.stringify(response), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+ clearTimeout(timeoutId);
+ return res;
+ } else {
+ const res = new Response(
+ JSON.stringify({
+ ready: false,
+ error: "Connected but no tools available",
+ authRequired: false,
+ } as HealthCheckResponse),
+ {
+ status: 503,
+ headers: { "Content-Type": "application/json" },
+ }
+ );
+ clearTimeout(timeoutId);
+ return res;
+ }
+ } catch (sseError) {
+ lastError = sseError instanceof Error ? sseError : new Error(String(sseError));
+ console.error("Both transports failed. Last error:", lastError);
+ }
+ }
+
+ // Both transports failed
+ let errorMessage = lastError?.message || "Failed to connect to MCP server";
+
+ // Detect unauthorized to signal auth requirement
+ const lower = (errorMessage || "").toLowerCase();
+ const authRequired =
+ lower.includes("unauthorized") ||
+ lower.includes("forbidden") ||
+ lower.includes("401") ||
+ lower.includes("403");
+
+ // Provide more helpful error messages
+ if (authRequired) {
+ errorMessage =
+ "Authentication required. Provide appropriate Authorization headers in the server configuration.";
+ } else if (errorMessage.includes("not valid JSON")) {
+ errorMessage =
+ "Server returned invalid response. This might not be a valid MCP endpoint. MCP servers should respond to POST requests at /mcp with JSON-RPC messages.";
+ } else if (errorMessage.includes("fetch failed") || errorMessage.includes("ECONNREFUSED")) {
+ errorMessage = `Cannot connect to ${url}. Please verify the server is running and accessible.`;
+ } else if (errorMessage.includes("CORS")) {
+ errorMessage = `CORS error. The MCP server needs to allow requests from this origin.`;
+ }
+
+ const res = new Response(
+ JSON.stringify({
+ ready: false,
+ error: errorMessage,
+ authRequired,
+ } as HealthCheckResponse),
+ {
+ status: 503,
+ headers: { "Content-Type": "application/json" },
+ }
+ );
+ clearTimeout(timeoutId);
+ return res;
+ } catch (error) {
+ console.error("MCP health check failed:", error);
+
+ // Clean up client if it exists
+ try {
+ await client?.close();
+ } catch {
+ // Ignore
+ }
+
+ const response: HealthCheckResponse = {
+ ready: false,
+ error: error instanceof Error ? error.message : "Unknown error",
+ };
+
+ const res = new Response(JSON.stringify(response), {
+ status: 503,
+ headers: { "Content-Type": "application/json" },
+ });
+ return res;
+ }
+};
diff --git a/src/routes/api/mcp/servers/+server.ts b/src/routes/api/mcp/servers/+server.ts
new file mode 100644
index 0000000000000000000000000000000000000000..f8ecc78f91b143f2a37b38d09b42035e136d7f4a
--- /dev/null
+++ b/src/routes/api/mcp/servers/+server.ts
@@ -0,0 +1,32 @@
+import type { MCPServer } from "$lib/types/Tool";
+import { env } from "$env/dynamic/private";
+
+export async function GET() {
+ // Parse MCP_SERVERS environment variable
+ const mcpServersEnv = env.MCP_SERVERS || "[]";
+
+ let servers: Array<{ name: string; url: string; headers?: Record<string, string> }> = [];
+
+ try {
+ servers = JSON.parse(mcpServersEnv);
+ if (!Array.isArray(servers)) {
+ servers = [];
+ }
+ } catch (error) {
+ console.error("Failed to parse MCP_SERVERS env variable:", error);
+ servers = [];
+ }
+
+ // Convert internal server config to client MCPServer format
+ const mcpServers: MCPServer[] = servers.map((server) => ({
+ id: `base-${server.name}`, // Stable ID based on name
+ name: server.name,
+ url: server.url,
+ type: "base" as const,
+ // headers intentionally omitted
+ isLocked: false, // Base servers can be toggled by users
+ status: undefined, // Status determined client-side via health check
+ }));
+
+ return Response.json(mcpServers);
+}
diff --git a/src/routes/api/models/+server.ts b/src/routes/api/models/+server.ts
index 6a1d41c495537ba5cc5ea117ed63b59cd973075d..dbde2160814854ec96c907c9ba6dbd3e79eb9e72 100644
--- a/src/routes/api/models/+server.ts
+++ b/src/routes/api/models/+server.ts
@@ -17,6 +17,7 @@ export async function GET() {
promptExamples: model.promptExamples ?? [],
preprompt: model.preprompt ?? "",
multimodal: model.multimodal ?? false,
+ supportsTools: (model as unknown as { supportsTools?: boolean }).supportsTools ?? false,
unlisted: model.unlisted ?? false,
hasInferenceAPI: model.hasInferenceAPI ?? false,
}));
diff --git a/src/routes/conversation/[id]/+page.svelte b/src/routes/conversation/[id]/+page.svelte
index e34deba43a8c722ea954b0a7f89fd4f9d08a5a80..460515b12b7fb006eddcd2e848be3c93211e329d 100644
--- a/src/routes/conversation/[id]/+page.svelte
+++ b/src/routes/conversation/[id]/+page.svelte
@@ -17,6 +17,7 @@
import { fetchMessageUpdates } from "$lib/utils/messageUpdates";
import type { v4 } from "uuid";
import { useSettingsStore } from "$lib/stores/settings.js";
+ import { enabledServers } from "$lib/stores/mcpServers";
import { browser } from "$app/environment";
import {
addBackgroundGeneration,
@@ -219,6 +220,12 @@
messageId,
isRetry,
files: isRetry ? userMessage?.files : base64Files,
+ selectedMcpServerNames: $enabledServers.map((s) => s.name),
+ selectedMcpServers: $enabledServers.map((s) => ({
+ name: s.name,
+ url: s.url,
+ headers: s.headers,
+ })),
},
messageUpdatesAbortController.signal
).catch((err) => {
diff --git a/src/routes/conversation/[id]/+server.ts b/src/routes/conversation/[id]/+server.ts
index 6b85afefb6f15b8b077fa5b7c64af51275074e0f..aee5179c27764e8000060f4df2d3e5cd774c2c98 100644
--- a/src/routes/conversation/[id]/+server.ts
+++ b/src/routes/conversation/[id]/+server.ts
@@ -9,6 +9,7 @@ import { z } from "zod";
import {
MessageUpdateStatus,
MessageUpdateType,
+ MessageReasoningUpdateType,
type MessageUpdate,
} from "$lib/types/MessageUpdate";
import { uploadFile } from "$lib/server/files/uploadFile";
@@ -124,6 +125,8 @@ export async function POST({ request, locals, params, getClientAddress }) {
inputs: newPrompt,
id: messageId,
is_retry: isRetry,
+ selectedMcpServerNames,
+ selectedMcpServers,
} = z
.object({
id: z.string().uuid().refine(isMessageId).optional(), // parent message id to append to for a normal message, or the message id for a retry/continue
@@ -134,6 +137,20 @@ export async function POST({ request, locals, params, getClientAddress }) {
.transform((s) => s.replace(/\r\n/g, "\n"))
),
is_retry: z.optional(z.boolean()),
+ selectedMcpServerNames: z.optional(z.array(z.string())),
+ selectedMcpServers: z
+ .optional(
+ z.array(
+ z.object({
+ name: z.string(),
+ url: z.string(),
+ headers: z
+ .optional(z.array(z.object({ key: z.string(), value: z.string() })))
+ .default([]),
+ })
+ )
+ )
+ .default([]),
files: z.optional(
z.array(
z.object({
@@ -147,6 +164,23 @@ export async function POST({ request, locals, params, getClientAddress }) {
})
.parse(JSON.parse(json));
+ // Attach MCP selection to locals so the text generation pipeline can consume it
+ try {
+ (locals as unknown as Record<string, unknown>).mcp = {
+ selectedServerNames: selectedMcpServerNames,
+ selectedServers: (selectedMcpServers ?? []).map((s) => ({
+ name: s.name,
+ url: s.url,
+ headers:
+ s.headers && s.headers.length > 0
+ ? Object.fromEntries(s.headers.map((h) => [h.key, h.value]))
+ : undefined,
+ })),
+ };
+ } catch {
+ // ignore attachment errors, pipeline will just use env servers
+ }
+
const inputFiles = await Promise.all(
form
.getAll("files")
@@ -343,6 +377,16 @@ export async function POST({ request, locals, params, getClientAddress }) {
lastTokenTimestamp = new Date();
}
+ // Append reasoning stream tokens to message.reasoning (server-side)
+ else if (
+ event.type === MessageUpdateType.Reasoning &&
+ event.subtype === MessageReasoningUpdateType.Stream &&
+ "token" in event
+ ) {
+ messageToWriteTo.reasoning ??= "";
+ messageToWriteTo.reasoning += event.token;
+ }
+
// Set the title
else if (event.type === MessageUpdateType.Title) {
// Always strip markers from titles when saving
@@ -357,7 +401,40 @@ export async function POST({ request, locals, params, getClientAddress }) {
// Set the final text and the interrupted flag
else if (event.type === MessageUpdateType.FinalAnswer) {
messageToWriteTo.interrupted = event.interrupted;
- messageToWriteTo.content = initialMessageContent + event.text;
+ // Default behavior: replace the streamed text with the provider's final text.
+ // However, when tools (MCP/function calls) were used, providers often stream
+ // some content (e.g., a story) before triggering tools, then return a
+ // different follow‑up message afterwards (e.g., an image caption). Our
+ // previous logic overwrote the pre‑tool content. Preserve it by merging in
+ // the pre‑tool stream when tool updates occurred and the final text does
+ // not already include the streamed prefix.
+ const hadTools = (messageToWriteTo.updates ?? []).some(
+ (u) => u.type === MessageUpdateType.Tool
+ );
+
+ if (hadTools) {
+ const existing = messageToWriteTo.content.slice(initialMessageContent.length);
+ if (existing && existing.length > 0) {
+ // A. If we already streamed the same final text, keep as-is.
+ if (event.text && existing.endsWith(event.text)) {
+ messageToWriteTo.content = initialMessageContent + existing;
+ }
+ // B. If the final text already includes the streamed prefix, use it verbatim.
+ else if (event.text && event.text.startsWith(existing)) {
+ messageToWriteTo.content = initialMessageContent + event.text;
+ }
+ // C. Otherwise, merge with a paragraph break for readability.
+ else {
+ const needsGap = !/\n\n$/.test(existing) && !/^\n/.test(event.text ?? "");
+ messageToWriteTo.content =
+ initialMessageContent + existing + (needsGap ? "\n\n" : "") + (event.text ?? "");
+ }
+ } else {
+ messageToWriteTo.content = initialMessageContent + (event.text ?? "");
+ }
+ } else {
+ messageToWriteTo.content = initialMessageContent + event.text;
+ }
finalAnswerReceived = true;
if (metricsEnabled && metrics) {
@@ -455,6 +532,10 @@ export async function POST({ request, locals, params, getClientAddress }) {
model.id
]
),
+ // Force-enable tools if user settings say so for this model
+ forceTools: Boolean(
+ (await collections.settings.findOne(authCondition(locals)))?.toolsOverrides?.[model.id]
+ ),
locals,
abortController: ctrl,
};
@@ -506,6 +587,17 @@ export async function POST({ request, locals, params, getClientAddress }) {
} finally {
// check if no output was generated
if (!hasError && !abortedByUser && messageToWriteTo.content === initialMessageContent) {
+ logger.warn(
+ {
+ conversationId: conversationKey,
+ updatesCount: messageToWriteTo.updates?.length ?? 0,
+ filesCount: messageToWriteTo.files?.length ?? 0,
+ reasoningLen: messageToWriteTo.reasoning?.length ?? 0,
+ initialLen: initialMessageContent.length,
+ finalLen: messageToWriteTo.content.length,
+ },
+ "No output generated after streaming; emitting error status"
+ );
await update({
type: MessageUpdateType.Status,
status: MessageUpdateStatus.Error,
diff --git a/src/routes/models/+page.svelte b/src/routes/models/+page.svelte
index 2180111145f279b640b65c9d90529da552a193ac..629b907138a5be3879c7287b17e267cc343effd2 100644
--- a/src/routes/models/+page.svelte
+++ b/src/routes/models/+page.svelte
@@ -7,6 +7,7 @@
import CarbonHelpFilled from "~icons/carbon/help-filled";
import CarbonView from "~icons/carbon/view";
+ import CarbonTools from "~icons/carbon/tools";
import CarbonSettings from "~icons/carbon/settings";
import { useSettingsStore } from "$lib/stores/settings";
import { goto } from "$app/navigation";
@@ -98,14 +99,24 @@
>
{/if}
+ {#if $settings.toolsOverrides?.[model.id] ?? (model as { supportsTools?: boolean }).supportsTools}
+
+
+
+ {/if}
{#if $settings.multimodalOverrides?.[model.id] ?? model.multimodal}
-
+
{/if}
- {model.isRouter ? "Routes your messages to the best model for your request." : model.description || "-"}
+ {model.isRouter
+ ? "Routes your messages to the best model for your request."
+ : model.description || "-"}
{/each}
@@ -155,8 +168,11 @@
background-image:
radial-gradient(900px 300px at -10% -20%, rgba(59, 130, 246, 0.16), transparent 60%),
radial-gradient(700px 240px at 110% 120%, rgba(16, 185, 129, 0.16), transparent 60%),
- linear-gradient(135deg, rgba(236, 72, 153, 0.10), rgba(59, 130, 246, 0.08));
- box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.06), 0 6px 18px rgba(59, 130, 246, 0.12), 0 2px 8px rgba(236, 72, 153, 0.10);
+ linear-gradient(135deg, rgba(236, 72, 153, 0.1), rgba(59, 130, 246, 0.08));
+ box-shadow:
+ inset 0 1px 0 rgba(255, 255, 255, 0.06),
+ 0 6px 18px rgba(59, 130, 246, 0.12),
+ 0 2px 8px rgba(236, 72, 153, 0.1);
}
:global(.dark) .omni-gradient {
@@ -164,8 +180,10 @@
radial-gradient(900px 300px at -10% -20%, rgba(59, 130, 246, 0.12), transparent 60%),
radial-gradient(700px 240px at 110% 120%, rgba(16, 185, 129, 0.12), transparent 60%),
linear-gradient(135deg, rgba(236, 72, 153, 0.08), rgba(59, 130, 246, 0.06));
- box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.04), 0 10px 28px rgba(0, 0, 0, 0.25);
+ box-shadow:
+ inset 0 1px 0 rgba(255, 255, 255, 0.04),
+ 0 10px 28px rgba(0, 0, 0, 0.25);
}
-/* Active border handled via Tailwind utilities (see .active-model in src/styles/main.css) */
+ /* Active border handled via Tailwind utilities (see .active-model in src/styles/main.css) */
diff --git a/src/routes/settings/(nav)/+layout.svelte b/src/routes/settings/(nav)/+layout.svelte
index 4e5994b8b89605fc46b074456ed42cce5baf3280..9f81f2f82eb1d1bfafb13f7c03487233ec8c0290 100644
--- a/src/routes/settings/(nav)/+layout.svelte
+++ b/src/routes/settings/(nav)/+layout.svelte
@@ -9,6 +9,7 @@
import CarbonTextLongParagraph from "~icons/carbon/text-long-paragraph";
import CarbonChevronLeft from "~icons/carbon/chevron-left";
import CarbonView from "~icons/carbon/view";
+ import CarbonTools from "~icons/carbon/tools";
import IconGear from "~icons/bi/gear-fill";
import type { LayoutData } from "../$types";
@@ -173,14 +174,25 @@
{/if}
+ {#if $settings.toolsOverrides?.[model.id] ?? (model as { supportsTools?: boolean }).supportsTools}
+
+
+
+ {/if}
+
{#if $settings.multimodalOverrides?.[model.id] ?? model.multimodal}
-
+
{/if}
diff --git a/src/routes/settings/(nav)/+server.ts b/src/routes/settings/(nav)/+server.ts
index 3222cb58d3ab12f1317a2a3cad92a6f49937b535..07ac4f13d2f7a09a54706b7832d0d9c41bd27ac1 100644
--- a/src/routes/settings/(nav)/+server.ts
+++ b/src/routes/settings/(nav)/+server.ts
@@ -15,6 +15,7 @@ export async function POST({ request, locals }) {
activeModel: z.string().default(DEFAULT_SETTINGS.activeModel),
customPrompts: z.record(z.string()).default({}),
multimodalOverrides: z.record(z.boolean()).default({}),
+ toolsOverrides: z.record(z.boolean()).default({}),
disableStream: z.boolean().default(false),
directPaste: z.boolean().default(false),
hidePromptExamples: z.record(z.boolean()).default({}),
diff --git a/src/routes/settings/(nav)/[...model]/+page.svelte b/src/routes/settings/(nav)/[...model]/+page.svelte
index ca8285ce68a354f68dba9d86273287c11061bc04..b543cbcc5463370ee317444c04884cf1b12cc370 100644
--- a/src/routes/settings/(nav)/[...model]/+page.svelte
+++ b/src/routes/settings/(nav)/[...model]/+page.svelte
@@ -19,6 +19,46 @@
const publicConfig = usePublicConfig();
const settings = useSettingsStore();
+ // Functional bindings for nested settings (Svelte 5):
+ // Avoid binding directly to $settings.*[modelId]; write via store update
+ function getToolsOverride() {
+ return $settings.toolsOverrides?.[page.params.model] ?? false;
+ }
+ function setToolsOverride(v: boolean) {
+ settings.update((s) => ({
+ ...s,
+ toolsOverrides: { ...s.toolsOverrides, [page.params.model]: v },
+ }));
+ }
+ function getMultimodalOverride() {
+ return $settings.multimodalOverrides?.[page.params.model] ?? false;
+ }
+ function setMultimodalOverride(v: boolean) {
+ settings.update((s) => ({
+ ...s,
+ multimodalOverrides: { ...s.multimodalOverrides, [page.params.model]: v },
+ }));
+ }
+ function getHidePromptExamples() {
+ return $settings.hidePromptExamples?.[page.params.model] ?? false;
+ }
+ function setHidePromptExamples(v: boolean) {
+ settings.update((s) => ({
+ ...s,
+ hidePromptExamples: { ...s.hidePromptExamples, [page.params.model]: v },
+ }));
+ }
+
+ function getCustomPrompt() {
+ return $settings.customPrompts?.[page.params.model] ?? "";
+ }
+ function setCustomPrompt(v: string) {
+ settings.update((s) => ({
+ ...s,
+ customPrompts: { ...s.customPrompts, [page.params.model]: v },
+ }));
+ }
+
+ type RouterProvider = { provider: string } & Record<string, unknown>;
$effect(() => {
@@ -43,6 +83,17 @@
}
});
+ // Initialize tools override for this model if not set yet
+ $effect(() => {
+ if (model) {
+ settings.initValue(
+ "toolsOverrides",
+ page.params.model,
+ Boolean((model as unknown as { supportsTools?: boolean }).supportsTools)
+ );
+ }
+ });
+
// Ensure hidePromptExamples has an entry for this model so the switch can bind safely
$effect(() => {
settings.initValue("hidePromptExamples", page.params.model, false);
@@ -161,7 +212,10 @@
class="ml-auto text-xs underline decoration-gray-300 hover:decoration-gray-700 dark:decoration-gray-700 dark:hover:decoration-gray-400"
onclick={(e) => {
e.stopPropagation();
- $settings.customPrompts[page.params.model] = model.preprompt;
+ settings.update((s) => ({
+ ...s,
+ customPrompts: { ...s.customPrompts, [page.params.model]: model.preprompt },
+ }));
}}
>
Reset
@@ -173,13 +227,25 @@
aria-label="Custom system prompt"
rows="8"
class="w-full resize-none rounded-md border border-gray-200 bg-gray-50 p-2 text-[13px] dark:border-gray-700 dark:bg-gray-900 dark:text-gray-200"
- bind:value={$settings.customPrompts[page.params.model]}
+ bind:value={getCustomPrompt, setCustomPrompt}
>
+
+
+
+ Tool calling (functions)
+
+
+ Enable tools and allow the model to call them in chat.
+
+
+
+
+
{/if}
diff --git a/src/routes/settings/(nav)/application/+page.svelte b/src/routes/settings/(nav)/application/+page.svelte
index cf57ff682e9dfe22257390b2e5f9e7898c415d0f..5d0b047b2cc93547393e1e0e114f215c63c3f924 100644
--- a/src/routes/settings/(nav)/application/+page.svelte
+++ b/src/routes/settings/(nav)/application/+page.svelte
@@ -19,6 +19,26 @@
const publicConfig = usePublicConfig();
let settings = useSettingsStore();
+ // Functional bindings for store fields (Svelte 5): avoid mutating $settings directly
+ function getShareWithAuthors() {
+ return $settings.shareConversationsWithModelAuthors;
+ }
+ function setShareWithAuthors(v: boolean) {
+ settings.update((s) => ({ ...s, shareConversationsWithModelAuthors: v }));
+ }
+ function getDisableStream() {
+ return $settings.disableStream;
+ }
+ function setDisableStream(v: boolean) {
+ settings.update((s) => ({ ...s, disableStream: v }));
+ }
+ function getDirectPaste() {
+ return $settings.directPaste;
+ }
+ function setDirectPaste(v: boolean) {
+ settings.update((s) => ({ ...s, directPaste: v }));
+ }
+
const client = useAPIClient();
let OPENAI_BASE_URL: string | null = $state(null);
@@ -121,7 +141,7 @@
{/if}
@@ -135,7 +155,7 @@
Show responses only when complete.
-
+
@@ -147,7 +167,7 @@
Paste long text directly into chat instead of a file.
-
+