Commit ·
77169b4
0
Parent(s):
feat: align hosted Space deployment with latest upstream
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .dockerignore +14 -0
- .gitignore +37 -0
- Dockerfile +116 -0
- README.md +86 -0
- core/__init__.py +5 -0
- core/account/__init__.py +5 -0
- core/account/pool.py +207 -0
- core/api/__init__.py +3 -0
- core/api/anthropic_routes.py +93 -0
- core/api/auth.py +455 -0
- core/api/chat_handler.py +1106 -0
- core/api/config_routes.py +329 -0
- core/api/conv_parser.py +186 -0
- core/api/fingerprint.py +41 -0
- core/api/function_call.py +351 -0
- core/api/mock_claude.py +104 -0
- core/api/react.py +244 -0
- core/api/react_stream_parser.py +435 -0
- core/api/routes.py +177 -0
- core/api/schemas.py +168 -0
- core/app.py +166 -0
- core/config/__init__.py +10 -0
- core/config/repository.py +593 -0
- core/config/schema.py +76 -0
- core/config/settings.py +147 -0
- core/constants.py +17 -0
- core/hub/__init__.py +14 -0
- core/hub/openai_sse.py +134 -0
- core/hub/schemas.py +46 -0
- core/plugin/__init__.py +5 -0
- core/plugin/base.py +519 -0
- core/plugin/claude.py +756 -0
- core/plugin/errors.py +46 -0
- core/plugin/helpers.py +1246 -0
- core/protocol/__init__.py +1 -0
- core/protocol/anthropic.py +461 -0
- core/protocol/base.py +38 -0
- core/protocol/images.py +108 -0
- core/protocol/openai.py +251 -0
- core/protocol/schemas.py +69 -0
- core/protocol/service.py +175 -0
- core/runtime/__init__.py +12 -0
- core/runtime/browser_manager.py +839 -0
- core/runtime/conversation_index.py +72 -0
- core/runtime/keys.py +15 -0
- core/runtime/local_proxy_forwarder.py +287 -0
- core/runtime/session_cache.py +81 -0
- core/static/config.html +1698 -0
- core/static/index.html +474 -0
- core/static/login.html +255 -0
.dockerignore
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
.git
|
| 2 |
+
.venv
|
| 3 |
+
__pycache__
|
| 4 |
+
*.pyc
|
| 5 |
+
*.pyo
|
| 6 |
+
*.pyd
|
| 7 |
+
.pytest_cache
|
| 8 |
+
.ruff_cache
|
| 9 |
+
.mypy_cache
|
| 10 |
+
.coverage
|
| 11 |
+
htmlcov
|
| 12 |
+
debug
|
| 13 |
+
docker-data
|
| 14 |
+
db.sqlite3
|
.gitignore
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
build/
|
| 3 |
+
.claude/
|
| 4 |
+
config.local.yaml
|
| 5 |
+
.coverage
|
| 6 |
+
.cursor/
|
| 7 |
+
# Cursor / editor state
|
| 8 |
+
db.sqlite3
|
| 9 |
+
debug/
|
| 10 |
+
# Debug output (runtime)
|
| 11 |
+
dist/
|
| 12 |
+
docker-data/
|
| 13 |
+
.DS_Store
|
| 14 |
+
*.egg-info
|
| 15 |
+
.env
|
| 16 |
+
htmlcov/
|
| 17 |
+
.idea/
|
| 18 |
+
# IDE & editor configs
|
| 19 |
+
# Local env (secrets)
|
| 20 |
+
# macOS
|
| 21 |
+
.mypy_cache/
|
| 22 |
+
__pycache__/
|
| 23 |
+
*.py[oc]
|
| 24 |
+
.pytest_cache/
|
| 25 |
+
# Python-generated files
|
| 26 |
+
.ruff_cache/
|
| 27 |
+
start_cf.sh
|
| 28 |
+
start_mock.sh
|
| 29 |
+
start.sh
|
| 30 |
+
*.swo
|
| 31 |
+
*.swp
|
| 32 |
+
# Test & coverage
|
| 33 |
+
# Type checking / linters
|
| 34 |
+
.venv
|
| 35 |
+
# Virtual environments
|
| 36 |
+
.vscode/
|
| 37 |
+
wheels/
|
Dockerfile
ADDED
|
@@ -0,0 +1,116 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM ubuntu:24.04
|
| 2 |
+
|
| 3 |
+
ARG TARGETARCH
|
| 4 |
+
ARG FINGERPRINT_CHROMIUM_URL_AMD64="https://github.com/adryfish/fingerprint-chromium/releases/download/142.0.7444.175/ungoogled-chromium-142.0.7444.175-1-x86_64_linux.tar.xz"
|
| 5 |
+
ARG FINGERPRINT_CHROMIUM_URL_ARM64_CHROMIUM_DEB="https://github.com/luispater/fingerprint-chromium-arm64/releases/download/135.0.7049.95-1/ungoogled-chromium_135.0.7049.95-1.deb12u1_arm64.deb"
|
| 6 |
+
ARG FINGERPRINT_CHROMIUM_URL_ARM64_COMMON_DEB="https://github.com/luispater/fingerprint-chromium-arm64/releases/download/135.0.7049.95-1/ungoogled-chromium-common_135.0.7049.95-1.deb12u1_arm64.deb"
|
| 7 |
+
ARG FINGERPRINT_CHROMIUM_URL_ARM64_SANDBOX_DEB="https://github.com/luispater/fingerprint-chromium-arm64/releases/download/135.0.7049.95-1/ungoogled-chromium-sandbox_135.0.7049.95-1.deb12u1_arm64.deb"
|
| 8 |
+
ARG FINGERPRINT_CHROMIUM_URL_ARM64_L10N_DEB="https://github.com/luispater/fingerprint-chromium-arm64/releases/download/135.0.7049.95-1/ungoogled-chromium-l10n_135.0.7049.95-1.deb12u1_all.deb"
|
| 9 |
+
|
| 10 |
+
ENV PYTHONDONTWRITEBYTECODE=1 \
|
| 11 |
+
PYTHONUNBUFFERED=1 \
|
| 12 |
+
PIP_NO_CACHE_DIR=1 \
|
| 13 |
+
WEB2API_DATA_DIR=/data \
|
| 14 |
+
HOME=/data
|
| 15 |
+
|
| 16 |
+
WORKDIR /app
|
| 17 |
+
|
| 18 |
+
RUN apt-get update && apt-get install -y --no-install-recommends \
|
| 19 |
+
ca-certificates \
|
| 20 |
+
curl \
|
| 21 |
+
xz-utils \
|
| 22 |
+
xvfb \
|
| 23 |
+
xauth \
|
| 24 |
+
python3 \
|
| 25 |
+
python3-pip \
|
| 26 |
+
python3-venv \
|
| 27 |
+
python-is-python3 \
|
| 28 |
+
software-properties-common \
|
| 29 |
+
fonts-liberation \
|
| 30 |
+
libasound2t64 \
|
| 31 |
+
libatk-bridge2.0-0t64 \
|
| 32 |
+
libatk1.0-0t64 \
|
| 33 |
+
libcairo2 \
|
| 34 |
+
libcups2t64 \
|
| 35 |
+
libdbus-1-3 \
|
| 36 |
+
libdrm2 \
|
| 37 |
+
libfontconfig1 \
|
| 38 |
+
libgbm1 \
|
| 39 |
+
libglib2.0-0t64 \
|
| 40 |
+
libgtk-3-0t64 \
|
| 41 |
+
libnspr4 \
|
| 42 |
+
libnss3 \
|
| 43 |
+
libpango-1.0-0 \
|
| 44 |
+
libu2f-udev \
|
| 45 |
+
libvulkan1 \
|
| 46 |
+
libx11-6 \
|
| 47 |
+
libx11-xcb1 \
|
| 48 |
+
libxcb1 \
|
| 49 |
+
libxcomposite1 \
|
| 50 |
+
libxdamage1 \
|
| 51 |
+
libxext6 \
|
| 52 |
+
libxfixes3 \
|
| 53 |
+
libxkbcommon0 \
|
| 54 |
+
libxrandr2 \
|
| 55 |
+
libxrender1 \
|
| 56 |
+
libxshmfence1 \
|
| 57 |
+
&& add-apt-repository -y universe \
|
| 58 |
+
&& add-apt-repository -y multiverse \
|
| 59 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 60 |
+
|
| 61 |
+
ENV VIRTUAL_ENV=/opt/venv \
|
| 62 |
+
PATH="/opt/venv/bin:$PATH"
|
| 63 |
+
|
| 64 |
+
RUN python -m venv "${VIRTUAL_ENV}" \
|
| 65 |
+
&& pip install --no-cache-dir --upgrade pip
|
| 66 |
+
|
| 67 |
+
RUN set -eux; \
|
| 68 |
+
arch="${TARGETARCH:-}"; \
|
| 69 |
+
if [ -z "${arch}" ]; then arch="$(dpkg --print-architecture)"; fi; \
|
| 70 |
+
mkdir -p /opt/fingerprint-chromium; \
|
| 71 |
+
case "${arch}" in \
|
| 72 |
+
amd64|x86_64) \
|
| 73 |
+
curl -L --fail --retry 5 --retry-delay 3 --retry-all-errors "${FINGERPRINT_CHROMIUM_URL_AMD64}" -o /tmp/fingerprint-chromium.tar.xz; \
|
| 74 |
+
tar -xf /tmp/fingerprint-chromium.tar.xz -C /opt/fingerprint-chromium --strip-components=1; \
|
| 75 |
+
rm -f /tmp/fingerprint-chromium.tar.xz; \
|
| 76 |
+
;; \
|
| 77 |
+
arm64|aarch64) \
|
| 78 |
+
curl -L --fail --retry 5 --retry-delay 3 --retry-all-errors "${FINGERPRINT_CHROMIUM_URL_ARM64_CHROMIUM_DEB}" -o /tmp/ungoogled-chromium.deb; \
|
| 79 |
+
curl -L --fail --retry 5 --retry-delay 3 --retry-all-errors "${FINGERPRINT_CHROMIUM_URL_ARM64_COMMON_DEB}" -o /tmp/ungoogled-chromium-common.deb; \
|
| 80 |
+
curl -L --fail --retry 5 --retry-delay 3 --retry-all-errors "${FINGERPRINT_CHROMIUM_URL_ARM64_SANDBOX_DEB}" -o /tmp/ungoogled-chromium-sandbox.deb; \
|
| 81 |
+
curl -L --fail --retry 5 --retry-delay 3 --retry-all-errors "${FINGERPRINT_CHROMIUM_URL_ARM64_L10N_DEB}" -o /tmp/ungoogled-chromium-l10n.deb; \
|
| 82 |
+
apt-get update; \
|
| 83 |
+
apt-get install -y --no-install-recommends /tmp/ungoogled-chromium.deb /tmp/ungoogled-chromium-common.deb /tmp/ungoogled-chromium-sandbox.deb /tmp/ungoogled-chromium-l10n.deb; \
|
| 84 |
+
rm -rf /var/lib/apt/lists/* /tmp/ungoogled-chromium*.deb; \
|
| 85 |
+
for bin in /usr/bin/ungoogled-chromium /usr/bin/chromium /usr/bin/chromium-browser; do \
|
| 86 |
+
if [ -x "${bin}" ]; then ln -sf "${bin}" /opt/fingerprint-chromium/chrome; break; fi; \
|
| 87 |
+
done; \
|
| 88 |
+
test -x /opt/fingerprint-chromium/chrome; \
|
| 89 |
+
;; \
|
| 90 |
+
*) \
|
| 91 |
+
echo "Unsupported architecture: ${arch}" >&2; \
|
| 92 |
+
exit 1; \
|
| 93 |
+
;; \
|
| 94 |
+
esac
|
| 95 |
+
|
| 96 |
+
COPY pyproject.toml /tmp/pyproject.toml
|
| 97 |
+
RUN python - <<'PY'
|
| 98 |
+
import subprocess
|
| 99 |
+
import tomllib
|
| 100 |
+
|
| 101 |
+
with open("/tmp/pyproject.toml", "rb") as f:
|
| 102 |
+
project = tomllib.load(f)["project"]
|
| 103 |
+
|
| 104 |
+
extra_deps = project.get("optional-dependencies", {}).get("postgres", [])
|
| 105 |
+
deps = [*project["dependencies"], *extra_deps]
|
| 106 |
+
subprocess.check_call(["pip", "install", "--no-cache-dir", *deps])
|
| 107 |
+
PY
|
| 108 |
+
|
| 109 |
+
COPY . /app
|
| 110 |
+
|
| 111 |
+
RUN chmod +x /app/docker/entrypoint.sh
|
| 112 |
+
|
| 113 |
+
VOLUME ["/data"]
|
| 114 |
+
EXPOSE 9000
|
| 115 |
+
|
| 116 |
+
ENTRYPOINT ["/app/docker/entrypoint.sh"]
|
README.md
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: Web2API
|
| 3 |
+
emoji: 🧩
|
| 4 |
+
colorFrom: blue
|
| 5 |
+
colorTo: indigo
|
| 6 |
+
sdk: docker
|
| 7 |
+
app_port: 9000
|
| 8 |
+
pinned: false
|
| 9 |
+
---
|
| 10 |
+
|
| 11 |
+
# Web2API
|
| 12 |
+
|
| 13 |
+
Bridge Claude Web sessions to OpenAI / Anthropic compatible APIs. Runs as a Docker Space on Hugging Face.
|
| 14 |
+
|
| 15 |
+
## Endpoints
|
| 16 |
+
|
| 17 |
+
| Path | Protocol | Description |
|
| 18 |
+
|------|----------|-------------|
|
| 19 |
+
| `/claude/v1/models` | OpenAI | List available models |
|
| 20 |
+
| `/claude/v1/chat/completions` | OpenAI | Chat completions |
|
| 21 |
+
| `/claude/v1/messages` | Anthropic | Messages API |
|
| 22 |
+
| `/config` | — | Admin dashboard |
|
| 23 |
+
|
| 24 |
+
## Supported models
|
| 25 |
+
|
| 26 |
+
| Model ID | Upstream | Tier | Notes |
|
| 27 |
+
|----------|----------|------|-------|
|
| 28 |
+
| `claude-sonnet-4.6` | claude-sonnet-4-6 | Free | Sonnet 4.6 (default) |
|
| 29 |
+
| `claude-sonnet-4-5` | claude-sonnet-4-5 | Free | Sonnet 4.5 |
|
| 30 |
+
| `claude-sonnet-4-6-thinking` | claude-sonnet-4-6 | Free | Sonnet 4.6 extended thinking |
|
| 31 |
+
| `claude-sonnet-4-5-thinking` | claude-sonnet-4-5 | Free | Sonnet 4.5 extended thinking |
|
| 32 |
+
| `claude-haiku-4-5` | claude-haiku-4-5 | Pro | Haiku 4.5 (fastest) |
|
| 33 |
+
| `claude-haiku-4-5-thinking` | claude-haiku-4-5 | Pro | Haiku 4.5 extended thinking |
|
| 34 |
+
| `claude-opus-4-6` | claude-opus-4-6 | Pro | Opus 4.6 (most capable) |
|
| 35 |
+
| `claude-opus-4-6-thinking` | claude-opus-4-6 | Pro | Opus 4.6 extended thinking |
|
| 36 |
+
|
| 37 |
+
Pro models require a Claude Pro subscription and must be enabled in the config page.
|
| 38 |
+
|
| 39 |
+
## Quick start
|
| 40 |
+
|
| 41 |
+
1. Set required secrets in Space settings
|
| 42 |
+
2. Open `/login` → `/config`
|
| 43 |
+
3. Add a proxy group and a Claude account with `auth.sessionKey`
|
| 44 |
+
4. (Optional) Enable Pro models toggle if your account has a Pro subscription
|
| 45 |
+
5. Call the API:
|
| 46 |
+
|
| 47 |
+
```bash
|
| 48 |
+
# OpenAI format (streaming)
|
| 49 |
+
curl $SPACE_URL/claude/v1/chat/completions \
|
| 50 |
+
-H "Authorization: Bearer $API_KEY" \
|
| 51 |
+
-H "Content-Type: application/json" \
|
| 52 |
+
-d '{"model":"claude-sonnet-4.6","stream":true,"messages":[{"role":"user","content":"Hello"}]}'
|
| 53 |
+
|
| 54 |
+
# Anthropic format (streaming)
|
| 55 |
+
curl $SPACE_URL/claude/v1/messages \
|
| 56 |
+
-H "Authorization: Bearer $API_KEY" \
|
| 57 |
+
-H "Content-Type: application/json" \
|
| 58 |
+
-d '{"model":"claude-sonnet-4.6","stream":true,"max_tokens":1024,"messages":[{"role":"user","content":"Hello"}]}'
|
| 59 |
+
|
| 60 |
+
# Extended thinking
|
| 61 |
+
curl $SPACE_URL/claude/v1/chat/completions \
|
| 62 |
+
-H "Authorization: Bearer $API_KEY" \
|
| 63 |
+
-H "Content-Type: application/json" \
|
| 64 |
+
-d '{"model":"claude-sonnet-4-6-thinking","stream":true,"messages":[{"role":"user","content":"Solve this step by step: what is 23 * 47?"}]}'
|
| 65 |
+
```
|
| 66 |
+
|
| 67 |
+
## Required secrets
|
| 68 |
+
|
| 69 |
+
| Secret | Purpose |
|
| 70 |
+
|--------|---------|
|
| 71 |
+
| `WEB2API_AUTH_API_KEY` | API auth key for `/claude/v1/*` |
|
| 72 |
+
| `WEB2API_AUTH_CONFIG_SECRET` | Password for `/login` and `/config` |
|
| 73 |
+
| `WEB2API_DATABASE_URL` | PostgreSQL URL for persistent config (optional) |
|
| 74 |
+
|
| 75 |
+
## Recommended environment variables
|
| 76 |
+
|
| 77 |
+
For a small CPU Space:
|
| 78 |
+
|
| 79 |
+
```
|
| 80 |
+
WEB2API_BROWSER_NO_SANDBOX=true
|
| 81 |
+
WEB2API_BROWSER_DISABLE_GPU=true
|
| 82 |
+
WEB2API_BROWSER_DISABLE_GPU_SANDBOX=true
|
| 83 |
+
WEB2API_SCHEDULER_RESIDENT_BROWSER_COUNT=0
|
| 84 |
+
WEB2API_SCHEDULER_TAB_MAX_CONCURRENT=5
|
| 85 |
+
WEB2API_BROWSER_CDP_PORT_COUNT=6
|
| 86 |
+
```
|
core/__init__.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
新架构核心包:插件式 Web2API,按 type 路由,浏览器/context/page/会话树形缓存。
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
__all__ = []
|
core/account/__init__.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""账号池:按 type 过滤、轮询获取 (ProxyGroup, Account)。"""
|
| 2 |
+
|
| 3 |
+
from core.account.pool import AccountPool
|
| 4 |
+
|
| 5 |
+
__all__ = ["AccountPool"]
|
core/account/pool.py
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
账号池:从配置加载代理组与账号,按 type 轮询 acquire。
|
| 3 |
+
|
| 4 |
+
除基础的全局轮询外,还支持:
|
| 5 |
+
|
| 6 |
+
- 按 proxy_key 反查代理组
|
| 7 |
+
- 在指定代理组内选择某个 type 的可用账号
|
| 8 |
+
- 排除当前账号后为 tab 切号选择备选账号
|
| 9 |
+
- 在未打开浏览器的代理组中选择某个 type 的候选账号
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
from dataclasses import replace
|
| 13 |
+
from typing import Iterator
|
| 14 |
+
|
| 15 |
+
from core.config.schema import AccountConfig, ProxyGroupConfig
|
| 16 |
+
from core.constants import TIMEZONE
|
| 17 |
+
from core.runtime.keys import ProxyKey
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class AccountPool:
|
| 21 |
+
"""
|
| 22 |
+
多 IP / 多账号池,按 type 过滤后轮询。
|
| 23 |
+
acquire(type) 返回 (ProxyGroupConfig, AccountConfig)。
|
| 24 |
+
get_group_by_proxy_key / acquire_from_group 供现役浏览器复用时使用。
|
| 25 |
+
"""
|
| 26 |
+
|
| 27 |
+
def __init__(self, groups: list[ProxyGroupConfig]) -> None:
|
| 28 |
+
self._groups = list(groups)
|
| 29 |
+
self._indices: dict[str, int] = {} # type -> 全局轮询下标
|
| 30 |
+
self._group_type_indices: dict[
|
| 31 |
+
tuple[str, str], int
|
| 32 |
+
] = {} # (fingerprint_id, type) -> 组内轮询下标
|
| 33 |
+
|
| 34 |
+
@classmethod
|
| 35 |
+
def from_groups(cls, groups: list[ProxyGroupConfig]) -> "AccountPool":
|
| 36 |
+
return cls(groups)
|
| 37 |
+
|
| 38 |
+
def reload(self, groups: list[ProxyGroupConfig]) -> None:
|
| 39 |
+
"""用新加载的配置替换当前组(如更新解冻时间后从 repository 重新 load_groups)。"""
|
| 40 |
+
self._groups = list(groups)
|
| 41 |
+
|
| 42 |
+
def groups(self) -> list[ProxyGroupConfig]:
|
| 43 |
+
"""返回当前全部代理组。"""
|
| 44 |
+
return list(self._groups)
|
| 45 |
+
|
| 46 |
+
def _accounts_by_type(
|
| 47 |
+
self, type_name: str
|
| 48 |
+
) -> Iterator[tuple[ProxyGroupConfig, AccountConfig]]:
|
| 49 |
+
"""按 type 遍历所有 (group, account),仅包含当前可用的账号(解冻时间已过或未设置)。"""
|
| 50 |
+
for g in self._groups:
|
| 51 |
+
for a in g.accounts:
|
| 52 |
+
if a.type == type_name and a.is_available():
|
| 53 |
+
yield g, a
|
| 54 |
+
|
| 55 |
+
def acquire(self, type_name: str) -> tuple[ProxyGroupConfig, AccountConfig]:
|
| 56 |
+
"""
|
| 57 |
+
按 type 轮询获取一组 (ProxyGroupConfig, AccountConfig)。
|
| 58 |
+
若该 type 无账号则抛出 ValueError。
|
| 59 |
+
"""
|
| 60 |
+
pairs = list(self._accounts_by_type(type_name))
|
| 61 |
+
if not pairs:
|
| 62 |
+
raise ValueError(f"没有类别为 {type_name!r} 的账号,请先在配置中添加")
|
| 63 |
+
n = len(pairs)
|
| 64 |
+
idx = self._indices.get(type_name, 0) % n
|
| 65 |
+
self._indices[type_name] = (idx + 1) % n
|
| 66 |
+
return pairs[idx]
|
| 67 |
+
|
| 68 |
+
def account_id(self, group: ProxyGroupConfig, account: AccountConfig) -> str:
|
| 69 |
+
"""生成账号唯一标识,用于会话缓存等。"""
|
| 70 |
+
return f"{group.fingerprint_id}:{account.name}"
|
| 71 |
+
|
| 72 |
+
def get_account_by_id(
|
| 73 |
+
self, account_id: str
|
| 74 |
+
) -> tuple[ProxyGroupConfig, AccountConfig] | None:
|
| 75 |
+
"""根据 account_id(fingerprint_id:name)反查 (group, account),用于复用会话时取 auth。"""
|
| 76 |
+
for g in self._groups:
|
| 77 |
+
for a in g.accounts:
|
| 78 |
+
if self.account_id(g, a) == account_id:
|
| 79 |
+
return g, a
|
| 80 |
+
return None
|
| 81 |
+
|
| 82 |
+
def get_group_by_proxy_key(self, proxy_key: ProxyKey) -> ProxyGroupConfig | None:
|
| 83 |
+
"""根据 proxy_key(proxy_host, proxy_user, fingerprint_id, use_proxy, timezone)反查对应代理组。"""
|
| 84 |
+
pk_tz = getattr(proxy_key, "timezone", None) or TIMEZONE
|
| 85 |
+
for g in self._groups:
|
| 86 |
+
g_tz = g.timezone or TIMEZONE
|
| 87 |
+
if (
|
| 88 |
+
g.proxy_host == proxy_key.proxy_host
|
| 89 |
+
and g.proxy_user == proxy_key.proxy_user
|
| 90 |
+
and g.fingerprint_id == proxy_key.fingerprint_id
|
| 91 |
+
and g.use_proxy == getattr(proxy_key, "use_proxy", True)
|
| 92 |
+
and g_tz == pk_tz
|
| 93 |
+
):
|
| 94 |
+
return g
|
| 95 |
+
return None
|
| 96 |
+
|
| 97 |
+
def acquire_from_group(
|
| 98 |
+
self,
|
| 99 |
+
group: ProxyGroupConfig,
|
| 100 |
+
type_name: str,
|
| 101 |
+
) -> tuple[ProxyGroupConfig, AccountConfig] | None:
|
| 102 |
+
"""
|
| 103 |
+
从指定 group 内按 type 轮询取一个账号;若无该 type 则返回 None。
|
| 104 |
+
供「现役浏览器对应 IP 组是否还有该 type 可用」时使用。
|
| 105 |
+
"""
|
| 106 |
+
pairs = [(g, a) for g, a in self._accounts_by_type(type_name) if g is group]
|
| 107 |
+
if not pairs:
|
| 108 |
+
return None
|
| 109 |
+
n = len(pairs)
|
| 110 |
+
key = (group.fingerprint_id, type_name)
|
| 111 |
+
idx = self._group_type_indices.get(key, 0) % n
|
| 112 |
+
self._group_type_indices[key] = (idx + 1) % n
|
| 113 |
+
return pairs[idx]
|
| 114 |
+
|
| 115 |
+
def available_accounts_in_group(
|
| 116 |
+
self,
|
| 117 |
+
group: ProxyGroupConfig,
|
| 118 |
+
type_name: str,
|
| 119 |
+
*,
|
| 120 |
+
exclude_account_ids: set[str] | None = None,
|
| 121 |
+
) -> list[AccountConfig]:
|
| 122 |
+
"""返回某代理组下指定 type 的全部可用账号,可排除若干 account_id。"""
|
| 123 |
+
exclude = exclude_account_ids or set()
|
| 124 |
+
return [
|
| 125 |
+
a
|
| 126 |
+
for g, a in self._accounts_by_type(type_name)
|
| 127 |
+
if g is group and self.account_id(group, a) not in exclude
|
| 128 |
+
]
|
| 129 |
+
|
| 130 |
+
def has_available_account_in_group(
|
| 131 |
+
self,
|
| 132 |
+
group: ProxyGroupConfig,
|
| 133 |
+
type_name: str,
|
| 134 |
+
*,
|
| 135 |
+
exclude_account_ids: set[str] | None = None,
|
| 136 |
+
) -> bool:
|
| 137 |
+
"""判断某代理组下是否仍有指定 type 的可用账号。"""
|
| 138 |
+
return bool(
|
| 139 |
+
self.available_accounts_in_group(
|
| 140 |
+
group,
|
| 141 |
+
type_name,
|
| 142 |
+
exclude_account_ids=exclude_account_ids,
|
| 143 |
+
)
|
| 144 |
+
)
|
| 145 |
+
|
| 146 |
+
def next_available_account_in_group(
|
| 147 |
+
self,
|
| 148 |
+
group: ProxyGroupConfig,
|
| 149 |
+
type_name: str,
|
| 150 |
+
*,
|
| 151 |
+
exclude_account_ids: set[str] | None = None,
|
| 152 |
+
) -> AccountConfig | None:
|
| 153 |
+
"""
|
| 154 |
+
在指定代理组内按轮询选择一个可用账号。
|
| 155 |
+
支持排除当前已绑定账号,用于 drained 后切号。
|
| 156 |
+
"""
|
| 157 |
+
accounts = self.available_accounts_in_group(
|
| 158 |
+
group,
|
| 159 |
+
type_name,
|
| 160 |
+
exclude_account_ids=exclude_account_ids,
|
| 161 |
+
)
|
| 162 |
+
if not accounts:
|
| 163 |
+
return None
|
| 164 |
+
n = len(accounts)
|
| 165 |
+
key = (group.fingerprint_id, type_name)
|
| 166 |
+
idx = self._group_type_indices.get(key, 0) % n
|
| 167 |
+
self._group_type_indices[key] = (idx + 1) % n
|
| 168 |
+
return accounts[idx]
|
| 169 |
+
|
| 170 |
+
def next_available_pair(
|
| 171 |
+
self,
|
| 172 |
+
type_name: str,
|
| 173 |
+
*,
|
| 174 |
+
exclude_fingerprint_ids: set[str] | None = None,
|
| 175 |
+
) -> tuple[ProxyGroupConfig, AccountConfig] | None:
|
| 176 |
+
"""
|
| 177 |
+
全局按 type 轮询选择一个可用账号,可排除若干代理组。
|
| 178 |
+
用于“未打开浏览器的组里挑一个候选账号”。
|
| 179 |
+
"""
|
| 180 |
+
exclude = exclude_fingerprint_ids or set()
|
| 181 |
+
pairs = [
|
| 182 |
+
(g, a)
|
| 183 |
+
for g, a in self._accounts_by_type(type_name)
|
| 184 |
+
if g.fingerprint_id not in exclude
|
| 185 |
+
]
|
| 186 |
+
if not pairs:
|
| 187 |
+
return None
|
| 188 |
+
n = len(pairs)
|
| 189 |
+
idx = self._indices.get(type_name, 0) % n
|
| 190 |
+
self._indices[type_name] = (idx + 1) % n
|
| 191 |
+
return pairs[idx]
|
| 192 |
+
|
| 193 |
+
def update_account_unfreeze_at(
|
| 194 |
+
self,
|
| 195 |
+
fingerprint_id: str,
|
| 196 |
+
account_name: str,
|
| 197 |
+
unfreeze_at: int | None,
|
| 198 |
+
) -> bool:
|
| 199 |
+
for group in self._groups:
|
| 200 |
+
if group.fingerprint_id != fingerprint_id:
|
| 201 |
+
continue
|
| 202 |
+
for index, account in enumerate(group.accounts):
|
| 203 |
+
if account.name != account_name:
|
| 204 |
+
continue
|
| 205 |
+
group.accounts[index] = replace(account, unfreeze_at=unfreeze_at)
|
| 206 |
+
return True
|
| 207 |
+
return False
|
core/api/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""API 层:OpenAI 兼容路由、会话解析、聊天编排。"""
|
| 2 |
+
|
| 3 |
+
__all__ = []
|
core/api/anthropic_routes.py
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Anthropic 协议路由。"""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import json
|
| 6 |
+
from collections.abc import AsyncIterator
|
| 7 |
+
from typing import Any
|
| 8 |
+
|
| 9 |
+
from fastapi import APIRouter, Depends, Request
|
| 10 |
+
from fastapi.responses import JSONResponse, StreamingResponse
|
| 11 |
+
|
| 12 |
+
from core.api.auth import require_api_key
|
| 13 |
+
from core.api.chat_handler import ChatHandler
|
| 14 |
+
from core.api.routes import get_chat_handler, resolve_request_model, check_pro_model_access
|
| 15 |
+
from core.protocol.anthropic import AnthropicProtocolAdapter
|
| 16 |
+
from core.protocol.service import CanonicalChatService
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def create_anthropic_router() -> APIRouter:
|
| 20 |
+
router = APIRouter(dependencies=[Depends(require_api_key)])
|
| 21 |
+
adapter = AnthropicProtocolAdapter()
|
| 22 |
+
|
| 23 |
+
@router.post("/anthropic/{provider}/v1/messages")
|
| 24 |
+
async def messages(
|
| 25 |
+
provider: str,
|
| 26 |
+
request: Request,
|
| 27 |
+
handler: ChatHandler = Depends(get_chat_handler),
|
| 28 |
+
) -> Any:
|
| 29 |
+
return await _messages(provider, request, handler)
|
| 30 |
+
|
| 31 |
+
@router.post("/{provider}/v1/messages")
|
| 32 |
+
async def messages_legacy(
|
| 33 |
+
provider: str,
|
| 34 |
+
request: Request,
|
| 35 |
+
handler: ChatHandler = Depends(get_chat_handler),
|
| 36 |
+
) -> Any:
|
| 37 |
+
return await _messages(provider, request, handler)
|
| 38 |
+
|
| 39 |
+
async def _messages(
|
| 40 |
+
provider: str,
|
| 41 |
+
request: Request,
|
| 42 |
+
handler: ChatHandler,
|
| 43 |
+
) -> Any:
|
| 44 |
+
raw_body = await request.json()
|
| 45 |
+
try:
|
| 46 |
+
canonical_req = resolve_request_model(
|
| 47 |
+
provider,
|
| 48 |
+
adapter.parse_request(provider, raw_body),
|
| 49 |
+
)
|
| 50 |
+
except Exception as exc:
|
| 51 |
+
status, payload = adapter.render_error(exc)
|
| 52 |
+
return JSONResponse(status_code=status, content=payload)
|
| 53 |
+
|
| 54 |
+
pro_err = check_pro_model_access(request, provider, canonical_req.model)
|
| 55 |
+
if pro_err is not None:
|
| 56 |
+
return pro_err
|
| 57 |
+
|
| 58 |
+
service = CanonicalChatService(handler)
|
| 59 |
+
if canonical_req.stream:
|
| 60 |
+
|
| 61 |
+
async def sse_stream() -> AsyncIterator[str]:
|
| 62 |
+
try:
|
| 63 |
+
async for event in adapter.render_stream(
|
| 64 |
+
canonical_req,
|
| 65 |
+
service.stream_raw(canonical_req),
|
| 66 |
+
):
|
| 67 |
+
yield event
|
| 68 |
+
except Exception as exc:
|
| 69 |
+
status, payload = adapter.render_error(exc)
|
| 70 |
+
del status
|
| 71 |
+
yield (
|
| 72 |
+
"event: error\n"
|
| 73 |
+
f"data: {json.dumps(payload, ensure_ascii=False)}\n\n"
|
| 74 |
+
)
|
| 75 |
+
|
| 76 |
+
return StreamingResponse(
|
| 77 |
+
sse_stream(),
|
| 78 |
+
media_type="text/event-stream",
|
| 79 |
+
headers={
|
| 80 |
+
"Cache-Control": "no-cache",
|
| 81 |
+
"Connection": "keep-alive",
|
| 82 |
+
"X-Accel-Buffering": "no",
|
| 83 |
+
},
|
| 84 |
+
)
|
| 85 |
+
|
| 86 |
+
try:
|
| 87 |
+
raw_events = await service.collect_raw(canonical_req)
|
| 88 |
+
return adapter.render_non_stream(canonical_req, raw_events)
|
| 89 |
+
except Exception as exc:
|
| 90 |
+
status, payload = adapter.render_error(exc)
|
| 91 |
+
return JSONResponse(status_code=status, content=payload)
|
| 92 |
+
|
| 93 |
+
return router
|
core/api/auth.py
ADDED
|
@@ -0,0 +1,455 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
API 与配置页鉴权。
|
| 3 |
+
|
| 4 |
+
- auth.api_key: 保护 /{type}/v1/*
|
| 5 |
+
- auth.config_secret: 保护 /config 与 /api/config、/api/types
|
| 6 |
+
|
| 7 |
+
全局鉴权设置优先级:数据库 > 环境变量回退 > YAML > 默认值。
|
| 8 |
+
config_secret 在文件模式下会回写为带前缀的 PBKDF2 哈希;环境变量回退模式下仅在内存中哈希。
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from __future__ import annotations
|
| 12 |
+
|
| 13 |
+
import base64
|
| 14 |
+
import hashlib
|
| 15 |
+
import hmac
|
| 16 |
+
import os
|
| 17 |
+
import re
|
| 18 |
+
import secrets
|
| 19 |
+
import time
|
| 20 |
+
from dataclasses import dataclass, field
|
| 21 |
+
from functools import lru_cache
|
| 22 |
+
from typing import Any, Literal
|
| 23 |
+
|
| 24 |
+
from fastapi import HTTPException, Request, status
|
| 25 |
+
|
| 26 |
+
from core.config.repository import (
|
| 27 |
+
APP_SETTING_AUTH_API_KEY,
|
| 28 |
+
APP_SETTING_AUTH_CONFIG_SECRET_HASH,
|
| 29 |
+
ConfigRepository,
|
| 30 |
+
)
|
| 31 |
+
from core.config.settings import (
|
| 32 |
+
get,
|
| 33 |
+
get_config_path,
|
| 34 |
+
has_env_override,
|
| 35 |
+
load_config,
|
| 36 |
+
reset_cache,
|
| 37 |
+
)
|
| 38 |
+
|
| 39 |
+
API_AUTH_REALM = "Bearer"
|
| 40 |
+
CONFIG_SECRET_PREFIX = "web2api_pbkdf2_sha256"
|
| 41 |
+
CONFIG_SECRET_ITERATIONS = 600_000
|
| 42 |
+
ADMIN_SESSION_COOKIE = "web2api_admin_session"
|
| 43 |
+
DEFAULT_ADMIN_SESSION_TTL_SECONDS = 7 * 24 * 60 * 60
|
| 44 |
+
DEFAULT_ADMIN_LOGIN_MAX_FAILURES = 5
|
| 45 |
+
DEFAULT_ADMIN_LOGIN_LOCK_SECONDS = 10 * 60
|
| 46 |
+
AuthSource = Literal["env", "db", "yaml", "default"]
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
@dataclass(frozen=True)
class EffectiveAuthSettings:
    """Resolved auth configuration after applying the precedence rules.

    Built by ``build_effective_auth_settings``; each ``*_source`` field
    records which layer supplied the value ("db", "env", "yaml" or
    "default").
    """

    # Raw API key text (newline/comma separated; "" means auth disabled).
    api_key_text: str
    api_key_source: AuthSource
    # PBKDF2-encoded config secret ("" disables the config dashboard login).
    config_secret_hash: str
    config_secret_source: AuthSource

    @property
    def api_keys(self) -> list[str]:
        """Individual API keys parsed out of ``api_key_text``."""
        return parse_api_keys(self.api_key_text)

    @property
    def api_key_env_managed(self) -> bool:
        # Always False in this build — env-supplied keys are not treated
        # as read-only here. NOTE(review): presumably differs from the
        # upstream variant; confirm intended behavior.
        return False

    @property
    def config_secret_env_managed(self) -> bool:
        # Always False in this build; see api_key_env_managed.
        return False

    @property
    def config_login_enabled(self) -> bool:
        """Dashboard login is enabled whenever a secret hash is configured."""
        return bool(self.config_secret_hash)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def parse_api_keys(raw: Any) -> list[str]:
    """Split an API-key setting into a list of non-empty keys.

    Accepts a list (items are stripped, empties dropped), ``None``
    (empty result), or a string where keys are separated by commas
    and/or newlines.
    """
    if isinstance(raw, list):
        keys: list[str] = []
        for item in raw:
            candidate = str(item).strip()
            if candidate:
                keys.append(candidate)
        return keys
    if raw is None:
        return []
    normalized = str(raw).replace("\n", ",")
    return [chunk.strip() for chunk in normalized.split(",") if chunk.strip()]
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def normalize_api_key_text(raw: Any) -> str:
    """Collapse an API-key setting into newline-separated text.

    Lists become one stripped key per line (empties dropped); any other
    value is coerced to a stripped string, with ``None`` mapping to "".
    """
    if isinstance(raw, list):
        stripped = (str(item).strip() for item in raw)
        return "\n".join(piece for piece in stripped if piece)
    return str(raw or "").strip()
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def _yaml_auth_config() -> dict[str, Any]:
    """Return the ``auth`` mapping from the YAML config, or {} if absent/invalid."""
    section = load_config().get("auth") or {}
    if not isinstance(section, dict):
        return {}
    return section
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def _normalize_config_secret_hash(value: Any) -> str:
    """Coerce a stored config secret into its hashed form.

    Empty values map to ""; already-hashed values pass through
    unchanged; plaintext secrets are hashed with PBKDF2.
    """
    candidate = str(value or "").strip()
    if not candidate:
        return ""
    if _is_hashed_config_secret(candidate):
        return candidate
    return hash_config_secret(candidate)
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
@lru_cache(maxsize=1)
def _hosted_config_secret_hash() -> str:
    """Hash of the env-supplied config secret, computed once per process.

    Cached because PBKDF2 derivation is deliberately slow; the env value
    is assumed stable for the process lifetime.
    """
    return _normalize_config_secret_hash(get("auth", "config_secret", ""))
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def build_effective_auth_settings(
    repo: ConfigRepository | None = None,
) -> EffectiveAuthSettings:
    """Resolve auth settings with precedence: DB > env override > YAML > default.

    ``repo`` supplies DB-stored app settings when available; env and
    YAML values come from the settings module.
    """
    db_settings = {} if repo is None else repo.load_app_settings()
    yaml_section = _yaml_auth_config()

    # --- API key precedence chain ---
    key_source: AuthSource
    if APP_SETTING_AUTH_API_KEY in db_settings:
        key_source = "db"
        key_text = normalize_api_key_text(db_settings.get(APP_SETTING_AUTH_API_KEY, ""))
    elif has_env_override("auth", "api_key"):
        key_source = "env"
        key_text = normalize_api_key_text(get("auth", "api_key", ""))
    elif "api_key" in yaml_section:
        key_source = "yaml"
        key_text = normalize_api_key_text(yaml_section.get("api_key", ""))
    else:
        key_source = "default"
        key_text = ""

    # --- Config secret precedence chain (values kept hashed) ---
    secret_source: AuthSource
    if APP_SETTING_AUTH_CONFIG_SECRET_HASH in db_settings:
        secret_source = "db"
        secret_hash = _normalize_config_secret_hash(
            db_settings.get(APP_SETTING_AUTH_CONFIG_SECRET_HASH, "")
        )
    elif has_env_override("auth", "config_secret"):
        secret_source = "env"
        secret_hash = _hosted_config_secret_hash()
    elif "config_secret" in yaml_section:
        secret_source = "yaml"
        secret_hash = _normalize_config_secret_hash(yaml_section.get("config_secret", ""))
    else:
        secret_source = "default"
        secret_hash = ""

    return EffectiveAuthSettings(
        api_key_text=key_text,
        api_key_source=key_source,
        config_secret_hash=secret_hash,
        config_secret_source=secret_source,
    )
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
def refresh_runtime_auth_settings(app: Any) -> EffectiveAuthSettings:
    """Recompute auth settings and cache them on ``app.state``."""
    resolved = build_effective_auth_settings(getattr(app.state, "config_repo", None))
    app.state.auth_settings = resolved
    return resolved
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def get_effective_auth_settings(request: Request | None = None) -> EffectiveAuthSettings:
    """Return auth settings: the app-cached copy when present, else rebuilt.

    Without a request, settings are rebuilt from scratch (no repo).
    """
    if request is None:
        return build_effective_auth_settings()
    cached = getattr(request.app.state, "auth_settings", None)
    if isinstance(cached, EffectiveAuthSettings):
        return cached
    return build_effective_auth_settings(getattr(request.app.state, "config_repo", None))
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def configured_api_keys(repo: ConfigRepository | None = None) -> list[str]:
    """Convenience accessor for the effective API key list."""
    settings = build_effective_auth_settings(repo)
    return settings.api_keys
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
def _extract_request_api_key(request: Request) -> str:
|
| 169 |
+
key = (request.headers.get("x-api-key") or "").strip()
|
| 170 |
+
if key:
|
| 171 |
+
return key
|
| 172 |
+
authorization = (request.headers.get("authorization") or "").strip()
|
| 173 |
+
if authorization.lower().startswith("bearer "):
|
| 174 |
+
return authorization[7:].strip()
|
| 175 |
+
return ""
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
def require_api_key(request: Request) -> None:
    """FastAPI dependency: reject the request unless a configured API key matches.

    A no-op when no keys are configured. Raises HTTP 401 otherwise.
    Comparison uses ``secrets.compare_digest`` to avoid timing leaks.
    """
    allowed = get_effective_auth_settings(request).api_keys
    if not allowed:
        return
    candidate = _extract_request_api_key(request)
    if candidate and any(secrets.compare_digest(candidate, key) for key in allowed):
        return
    raise HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Unauthorized. Provide a valid API key.",
        headers={"WWW-Authenticate": API_AUTH_REALM},
    )
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
def _is_hashed_config_secret(value: str) -> bool:
    """True when ``value`` already carries the PBKDF2 hash prefix."""
    return value.startswith(CONFIG_SECRET_PREFIX + "$")
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def configured_config_secret_hash(repo: ConfigRepository | None = None) -> str:
    """Convenience accessor for the effective config-secret hash."""
    settings = build_effective_auth_settings(repo)
    return settings.config_secret_hash
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
def config_login_enabled(request: Request | None = None) -> bool:
    """Whether the config dashboard login is enabled (a secret hash exists)."""
    settings = get_effective_auth_settings(request)
    return settings.config_login_enabled
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
def configured_config_login_max_failures() -> int:
    """Max consecutive failed logins before lockout; always at least 1."""
    configured = get("auth", "config_login_max_failures", DEFAULT_ADMIN_LOGIN_MAX_FAILURES)
    try:
        value = int(configured)
    except Exception:
        # Fall back to the default on any non-numeric configuration.
        return DEFAULT_ADMIN_LOGIN_MAX_FAILURES
    return max(1, value)
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def configured_config_login_lock_seconds() -> int:
    """Lockout duration in seconds after too many failures; always at least 1."""
    configured = get("auth", "config_login_lock_seconds", DEFAULT_ADMIN_LOGIN_LOCK_SECONDS)
    try:
        value = int(configured)
    except Exception:
        # Fall back to the default on any non-numeric configuration.
        return DEFAULT_ADMIN_LOGIN_LOCK_SECONDS
    return max(1, value)
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
def hash_config_secret(secret: str) -> str:
    """Derive a salted PBKDF2-SHA256 hash of ``secret``.

    Output format: ``prefix$iterations$salt_b64$digest_b64`` with
    URL-safe base64 encoding, so the value can be stored in YAML.
    """
    salt = os.urandom(16)
    digest = hashlib.pbkdf2_hmac(
        "sha256",
        secret.encode("utf-8"),
        salt,
        CONFIG_SECRET_ITERATIONS,
    )
    fields = [
        CONFIG_SECRET_PREFIX,
        str(CONFIG_SECRET_ITERATIONS),
        base64.urlsafe_b64encode(salt).decode("ascii"),
        base64.urlsafe_b64encode(digest).decode("ascii"),
    ]
    return "$".join(fields)
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
def verify_config_secret(secret: str, encoded: str) -> bool:
    """Constant-time check of ``secret`` against an encoded PBKDF2 hash.

    Returns False for any malformed, foreign-prefixed, or undecodable
    encoding instead of raising.
    """
    fields = encoded.split("$", 3)
    if len(fields) != 4 or fields[0] != CONFIG_SECRET_PREFIX:
        return False
    _, iteration_text, salt_text, digest_text = fields
    try:
        rounds = int(iteration_text)
        salt = base64.urlsafe_b64decode(salt_text.encode("ascii"))
        expected = base64.urlsafe_b64decode(digest_text.encode("ascii"))
    except Exception:
        return False
    computed = hashlib.pbkdf2_hmac(
        "sha256",
        secret.encode("utf-8"),
        salt,
        rounds,
    )
    # hmac.compare_digest avoids leaking the mismatch position via timing.
    return hmac.compare_digest(computed, expected)
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
def ensure_config_secret_hashed(repo: ConfigRepository | None = None) -> None:
    """Rewrite a plaintext ``auth.config_secret`` in the YAML file as a PBKDF2 hash.

    No-ops when the secret comes from an env override (hash kept only in
    memory), when the DB already stores a hash, when the YAML value is
    missing or already hashed, or when the config file does not exist.
    The file is updated atomically via a temp file, then the settings
    cache is reset and reloaded.
    """
    if has_env_override("auth", "config_secret"):
        # Env-managed secret: warm the in-memory hash cache only.
        _hosted_config_secret_hash()
        return
    if repo is not None and repo.get_app_setting(APP_SETTING_AUTH_CONFIG_SECRET_HASH) is not None:
        # DB already owns the hashed secret; the YAML copy is irrelevant.
        return
    cfg = load_config()
    auth_cfg = cfg.get("auth")
    if not isinstance(auth_cfg, dict):
        return
    raw_value = auth_cfg.get("config_secret")
    secret = str(raw_value or "").strip()
    if not secret or _is_hashed_config_secret(secret):
        # Nothing to do: no secret configured, or it is already hashed.
        return
    encoded = hash_config_secret(secret)
    config_path = get_config_path()
    if not config_path.exists():
        return
    original = config_path.read_text(encoding="utf-8")
    # Match the existing "config_secret:" line, preserving its indentation.
    pattern = re.compile(r"^([ \t]*)config_secret\s*:\s*.*$", re.MULTILINE)
    replacement = None
    for line in original.splitlines():
        match = pattern.match(line)
        if match:
            replacement = f"{match.group(1)}config_secret: '{encoded}'"
            break
    updated: str
    if replacement is not None:
        # Replace only the first occurrence; bail if substitution misfires.
        updated, count = pattern.subn(replacement, original, count=1)
        if count != 1:
            return
    else:
        # No config_secret line found: insert under an existing "auth:"
        # section, or append a new section at the end of the file.
        auth_pattern = re.compile(r"^auth\s*:\s*$", re.MULTILINE)
        match = auth_pattern.search(original)
        if match:
            insert_at = match.end()
            updated = (
                original[:insert_at]
                + "\n"
                + f"  config_secret: '{encoded}'"
                + original[insert_at:]
            )
        else:
            suffix = "" if original.endswith("\n") or not original else "\n"
            updated = (
                original
                + suffix
                + "auth:\n"
                + f"  config_secret: '{encoded}'\n"
            )
    # Atomic replace: write to a sibling temp file, then rename over.
    tmp_path = config_path.with_suffix(config_path.suffix + ".tmp")
    tmp_path.write_text(updated, encoding="utf-8")
    tmp_path.replace(config_path)
    reset_cache()
    load_config()
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
@dataclass
class AdminSessionStore:
    """In-memory admin session tokens with TTL-based expiry."""

    ttl_seconds: int = DEFAULT_ADMIN_SESSION_TTL_SECONDS
    _sessions: dict[str, float] = field(default_factory=dict)

    def create(self) -> str:
        """Mint a new session token valid for ``ttl_seconds`` from now."""
        new_token = secrets.token_urlsafe(32)
        self._sessions[new_token] = time.time() + self.ttl_seconds
        return new_token

    def is_valid(self, token: str) -> bool:
        """Whether ``token`` exists and has not expired (expired ones are dropped)."""
        if not token:
            return False
        self.cleanup()
        deadline = self._sessions.get(token)
        if deadline is None:
            return False
        if deadline < time.time():
            self._sessions.pop(token, None)
            return False
        return True

    def revoke(self, token: str) -> None:
        """Forget ``token`` if present."""
        if token:
            self._sessions.pop(token, None)

    def cleanup(self) -> None:
        """Drop every session whose deadline has already passed."""
        cutoff = time.time()
        stale = [tok for tok, deadline in self._sessions.items() if deadline < cutoff]
        for tok in stale:
            self._sessions.pop(tok, None)
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
@dataclass
class LoginAttemptState:
    """Mutable per-IP login bookkeeping used by AdminLoginAttemptStore."""

    failures: int = 0  # consecutive failed attempts since last success/lock
    locked_until: float = 0.0  # epoch seconds; 0.0 means not locked
    last_seen: float = 0.0  # epoch seconds of the most recent attempt
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
@dataclass
class AdminLoginAttemptStore:
    """Per-IP failed-login counter with temporary lockout."""

    max_failures: int = DEFAULT_ADMIN_LOGIN_MAX_FAILURES
    lock_seconds: int = DEFAULT_ADMIN_LOGIN_LOCK_SECONDS
    _attempts: dict[str, LoginAttemptState] = field(default_factory=dict)

    def is_locked(self, client_ip: str) -> int:
        """Seconds remaining on the lock for ``client_ip`` (0 when unlocked)."""
        self.cleanup()
        record = self._attempts.get(client_ip)
        if record is None:
            return 0
        seconds_left = int(record.locked_until - time.time())
        return seconds_left if seconds_left > 0 else 0

    def record_failure(self, client_ip: str) -> int:
        """Count one failure; returns the lock duration in seconds when locked."""
        moment = time.time()
        record = self._attempts.setdefault(client_ip, LoginAttemptState())
        record.last_seen = moment
        if record.locked_until > moment:
            # Already locked: report remaining time without re-counting.
            return int(record.locked_until - moment)
        record.failures += 1
        if record.failures < self.max_failures:
            return 0
        # Threshold reached: reset the counter and start the lockout.
        record.failures = 0
        record.locked_until = moment + self.lock_seconds
        return self.lock_seconds

    def record_success(self, client_ip: str) -> None:
        """Clear any failure history for ``client_ip``."""
        self._attempts.pop(client_ip, None)

    def cleanup(self) -> None:
        """Drop unlocked entries that have been idle long enough to forget."""
        moment = time.time()
        threshold = moment - max(self.lock_seconds * 2, 3600)
        forgettable = [
            addr
            for addr, record in self._attempts.items()
            if record.locked_until <= moment and record.last_seen < threshold
        ]
        for addr in forgettable:
            self._attempts.pop(addr, None)
|
| 400 |
+
|
| 401 |
+
|
| 402 |
+
def _admin_store(request: Request) -> AdminSessionStore:
    """Fetch the app's session store; raise 503 when it is not initialised."""
    sessions = getattr(request.app.state, "admin_sessions", None)
    if sessions is None:
        raise HTTPException(status_code=503, detail="Admin session store is unavailable")
    return sessions
|
| 407 |
+
|
| 408 |
+
|
| 409 |
+
def _admin_login_attempt_store(request: Request) -> AdminLoginAttemptStore:
    """Fetch the app's login rate limiter; raise 503 when it is not initialised."""
    limiter = getattr(request.app.state, "admin_login_attempts", None)
    if limiter is None:
        raise HTTPException(status_code=503, detail="Login rate limiter is unavailable")
    return limiter
|
| 414 |
+
|
| 415 |
+
|
| 416 |
+
def client_ip_of(request: Request) -> str:
    """Best-effort client IP for rate limiting; "unknown" when unavailable."""
    peer = getattr(request, "client", None)
    address = getattr(peer, "host", None)
    if address:
        return str(address)
    return "unknown"
|
| 420 |
+
|
| 421 |
+
|
| 422 |
+
def check_admin_login_rate_limit(request: Request) -> None:
    """Raise HTTP 429 while the caller's IP is locked out from login attempts."""
    seconds_left = _admin_login_attempt_store(request).is_locked(client_ip_of(request))
    if seconds_left <= 0:
        return
    raise HTTPException(
        status_code=status.HTTP_429_TOO_MANY_REQUESTS,
        detail=f"Too many failed login attempts. Try again in {seconds_left} seconds.",
    )
|
| 429 |
+
|
| 430 |
+
|
| 431 |
+
def record_admin_login_failure(request: Request) -> int:
    """Register a failed login for this client; returns lock seconds if now locked."""
    limiter = _admin_login_attempt_store(request)
    return limiter.record_failure(client_ip_of(request))
|
| 433 |
+
|
| 434 |
+
|
| 435 |
+
def record_admin_login_success(request: Request) -> None:
    """Reset the failure counter for this client after a successful login."""
    limiter = _admin_login_attempt_store(request)
    limiter.record_success(client_ip_of(request))
|
| 437 |
+
|
| 438 |
+
|
| 439 |
+
def admin_logged_in(request: Request) -> bool:
    """Whether the request carries a valid admin session cookie."""
    if not config_login_enabled(request):
        return False
    cookie_token = (request.cookies.get(ADMIN_SESSION_COOKIE) or "").strip()
    return _admin_store(request).is_valid(cookie_token)
|
| 444 |
+
|
| 445 |
+
|
| 446 |
+
def require_config_login_enabled(request: Request | None = None) -> None:
    """Raise HTTP 404 when the config dashboard is disabled (no secret configured)."""
    if config_login_enabled(request):
        return
    raise HTTPException(status_code=404, detail="Config dashboard is disabled")
|
| 449 |
+
|
| 450 |
+
|
| 451 |
+
def require_config_login(request: Request) -> None:
    """Dependency guarding config endpoints: 404 when disabled, 401 when not signed in."""
    require_config_login_enabled(request)
    if not admin_logged_in(request):
        raise HTTPException(status_code=401, detail="Please sign in to access the config dashboard")
|
core/api/chat_handler.py
ADDED
|
@@ -0,0 +1,1106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
聊天请求编排:解析 session_id、调度 browser/tab/session、调用插件流式补全,
|
| 3 |
+
并在响应末尾附加零宽字符编码的会话 ID。
|
| 4 |
+
|
| 5 |
+
当前调度模型:
|
| 6 |
+
|
| 7 |
+
- 一个浏览器对应一个代理组
|
| 8 |
+
- 一个浏览器内,一个 type 只有一个 tab
|
| 9 |
+
- 一个 tab 绑定一个 account,只有 drained 后才能切号
|
| 10 |
+
- 一个 session 绑定到某个 tab/account;复用成功时不传完整历史
|
| 11 |
+
- 无法复用时,新建会话并回放完整历史
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
from __future__ import annotations
|
| 15 |
+
|
| 16 |
+
import asyncio
|
| 17 |
+
import json
|
| 18 |
+
import logging
|
| 19 |
+
import time
|
| 20 |
+
import uuid
|
| 21 |
+
from dataclasses import dataclass
|
| 22 |
+
from pathlib import Path
|
| 23 |
+
from typing import Any, AsyncIterator, cast
|
| 24 |
+
|
| 25 |
+
from playwright.async_api import BrowserContext, Page
|
| 26 |
+
|
| 27 |
+
from core.account.pool import AccountPool
|
| 28 |
+
from core.config.repository import ConfigRepository
|
| 29 |
+
from core.config.schema import AccountConfig, ProxyGroupConfig
|
| 30 |
+
from core.config.settings import get
|
| 31 |
+
from core.constants import TIMEZONE
|
| 32 |
+
from core.plugin.base import (
|
| 33 |
+
AccountFrozenError,
|
| 34 |
+
BaseSitePlugin,
|
| 35 |
+
BrowserResourceInvalidError,
|
| 36 |
+
PluginRegistry,
|
| 37 |
+
)
|
| 38 |
+
from core.plugin.helpers import clear_cookies_for_domain
|
| 39 |
+
from core.runtime.browser_manager import BrowserManager, ClosedTabInfo, TabRuntime
|
| 40 |
+
from core.runtime.keys import ProxyKey
|
| 41 |
+
from core.runtime.local_proxy_forwarder import LocalProxyForwarder, UpstreamProxy, parse_proxy_server
|
| 42 |
+
from core.runtime.session_cache import SessionCache, SessionEntry
|
| 43 |
+
|
| 44 |
+
from core.api.conv_parser import parse_conv_uuid_from_messages, session_id_suffix
|
| 45 |
+
from core.api.fingerprint import compute_conversation_fingerprint
|
| 46 |
+
from core.api.react import format_react_prompt
|
| 47 |
+
from core.api.schemas import OpenAIChatRequest, extract_user_content
|
| 48 |
+
from core.hub.schemas import OpenAIStreamEvent
|
| 49 |
+
from core.runtime.conversation_index import ConversationIndex
|
| 50 |
+
|
| 51 |
+
logger = logging.getLogger(__name__)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def _request_messages_as_dicts(req: OpenAIChatRequest) -> list[dict[str, Any]]:
|
| 55 |
+
"""转为 conv_parser 需要的 list[dict]。"""
|
| 56 |
+
out: list[dict[str, Any]] = []
|
| 57 |
+
for m in req.messages:
|
| 58 |
+
d: dict[str, Any] = {"role": m.role}
|
| 59 |
+
if isinstance(m.content, list):
|
| 60 |
+
d["content"] = [p.model_dump() for p in m.content]
|
| 61 |
+
else:
|
| 62 |
+
d["content"] = m.content
|
| 63 |
+
out.append(d)
|
| 64 |
+
return out
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _proxy_key_for_group(group: ProxyGroupConfig) -> ProxyKey:
    """Build the ProxyKey that identifies the browser serving this proxy group."""
    effective_timezone = group.timezone or TIMEZONE
    return ProxyKey(
        group.proxy_host,
        group.proxy_user,
        group.fingerprint_id,
        group.use_proxy,
        effective_timezone,
    )
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
@dataclass
class _RequestTarget:
    """Resolved routing target for a single chat request.

    Bundles the proxy group and chosen account with the live Playwright
    resources that will serve the request, plus optional per-request
    proxy details for direct streaming.
    """

    proxy_key: ProxyKey  # identity of the browser (proxy host/user + fingerprint)
    group: ProxyGroupConfig
    account: AccountConfig
    context: BrowserContext  # live Playwright context hosting the tab
    page: Page
    session_id: str | None  # upstream session id to reuse, if any
    full_history: bool  # True when the full conversation must be replayed
    proxy_url: str | None = None  # per-request proxy endpoint, when proxying is enabled
    proxy_auth: tuple[str, str] | None = None  # (user, password) for proxy_url, if needed
    proxy_forwarder: LocalProxyForwarder | None = None  # local forwarder backing proxy_url, if used
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class ChatHandler:
|
| 92 |
+
"""编排一次 chat 请求:会话解析、tab 调度、插件调用。"""
|
| 93 |
+
|
| 94 |
+
def __init__(
|
| 95 |
+
self,
|
| 96 |
+
pool: AccountPool,
|
| 97 |
+
session_cache: SessionCache,
|
| 98 |
+
browser_manager: BrowserManager,
|
| 99 |
+
config_repo: ConfigRepository | None = None,
|
| 100 |
+
) -> None:
|
| 101 |
+
self._pool = pool
|
| 102 |
+
self._session_cache = session_cache
|
| 103 |
+
self._browser_manager = browser_manager
|
| 104 |
+
self._config_repo = config_repo
|
| 105 |
+
self._conv_index = ConversationIndex()
|
| 106 |
+
self._schedule_lock = asyncio.Lock()
|
| 107 |
+
self._stop_event = asyncio.Event()
|
| 108 |
+
self._busy_sessions: set[str] = set()
|
| 109 |
+
self._tab_max_concurrent = int(get("scheduler", "tab_max_concurrent") or 5)
|
| 110 |
+
self._gc_interval_seconds = float(
|
| 111 |
+
get("scheduler", "browser_gc_interval_seconds") or 300
|
| 112 |
+
)
|
| 113 |
+
self._tab_idle_seconds = float(get("scheduler", "tab_idle_seconds") or 900)
|
| 114 |
+
self._resident_browser_count = int(
|
| 115 |
+
get("scheduler", "resident_browser_count", 1)
|
| 116 |
+
)
|
| 117 |
+
|
| 118 |
+
def reload_pool(
|
| 119 |
+
self,
|
| 120 |
+
groups: list[ProxyGroupConfig],
|
| 121 |
+
config_repo: ConfigRepository | None = None,
|
| 122 |
+
) -> None:
|
| 123 |
+
"""配置热更新后替换账号池与 repository。"""
|
| 124 |
+
self._pool.reload(groups)
|
| 125 |
+
if config_repo is not None:
|
| 126 |
+
self._config_repo = config_repo
|
| 127 |
+
|
| 128 |
+
async def refresh_configuration(
|
| 129 |
+
self,
|
| 130 |
+
groups: list[ProxyGroupConfig],
|
| 131 |
+
config_repo: ConfigRepository | None = None,
|
| 132 |
+
) -> None:
|
| 133 |
+
"""配置热更新:替换账号池、清理失效资源,并重新预热常驻浏览器。"""
|
| 134 |
+
async with self._schedule_lock:
|
| 135 |
+
self.reload_pool(groups, config_repo)
|
| 136 |
+
await self._prune_invalid_resources_locked()
|
| 137 |
+
await self._reconcile_tabs_locked()
|
| 138 |
+
await self.prewarm_resident_browsers()
|
| 139 |
+
|
| 140 |
+
async def prewarm_resident_browsers(self) -> None:
|
| 141 |
+
"""启动时预热常驻浏览器,并为其下可用 type 建立 tab。"""
|
| 142 |
+
async with self._schedule_lock:
|
| 143 |
+
warmed = 0
|
| 144 |
+
for group in self._pool.groups():
|
| 145 |
+
if warmed >= self._resident_browser_count:
|
| 146 |
+
break
|
| 147 |
+
available_types = {
|
| 148 |
+
a.type
|
| 149 |
+
for a in group.accounts
|
| 150 |
+
if a.is_available() and PluginRegistry.get(a.type) is not None
|
| 151 |
+
}
|
| 152 |
+
if not available_types:
|
| 153 |
+
continue
|
| 154 |
+
proxy_key = _proxy_key_for_group(group)
|
| 155 |
+
await self._browser_manager.ensure_browser(proxy_key, group.proxy_pass)
|
| 156 |
+
for type_name in sorted(available_types):
|
| 157 |
+
if self._browser_manager.get_tab(proxy_key, type_name) is not None:
|
| 158 |
+
continue
|
| 159 |
+
account = self._pool.available_accounts_in_group(group, type_name)
|
| 160 |
+
if not account:
|
| 161 |
+
continue
|
| 162 |
+
chosen = account[0]
|
| 163 |
+
plugin = PluginRegistry.get(type_name)
|
| 164 |
+
if plugin is None:
|
| 165 |
+
continue
|
| 166 |
+
await self._browser_manager.open_tab(
|
| 167 |
+
proxy_key,
|
| 168 |
+
group.proxy_pass,
|
| 169 |
+
type_name,
|
| 170 |
+
self._pool.account_id(group, chosen),
|
| 171 |
+
plugin.create_page,
|
| 172 |
+
self._make_apply_auth_fn(plugin, chosen),
|
| 173 |
+
)
|
| 174 |
+
warmed += 1
|
| 175 |
+
|
| 176 |
+
async def run_maintenance_loop(self) -> None:
|
| 177 |
+
"""周期性回收空闲浏览器,并收尾 drained/frozen tab。"""
|
| 178 |
+
while not self._stop_event.is_set():
|
| 179 |
+
try:
|
| 180 |
+
await asyncio.wait_for(
|
| 181 |
+
self._stop_event.wait(),
|
| 182 |
+
timeout=self._gc_interval_seconds,
|
| 183 |
+
)
|
| 184 |
+
break
|
| 185 |
+
except asyncio.TimeoutError:
|
| 186 |
+
pass
|
| 187 |
+
|
| 188 |
+
try:
|
| 189 |
+
async with self._schedule_lock:
|
| 190 |
+
# Evict stale sessions to prevent unbounded accumulation.
|
| 191 |
+
stale_ids = self._session_cache.evict_stale()
|
| 192 |
+
# Evict stale fingerprint entries in sync.
|
| 193 |
+
stale_fp = self._conv_index.evict_stale(ttl=1800.0)
|
| 194 |
+
if stale_fp:
|
| 195 |
+
logger.info(
|
| 196 |
+
"[maintenance] evicted %d stale fingerprint entries",
|
| 197 |
+
len(stale_fp),
|
| 198 |
+
)
|
| 199 |
+
if stale_ids:
|
| 200 |
+
for sid in stale_ids:
|
| 201 |
+
plugin_type = None
|
| 202 |
+
for pk, entry in self._browser_manager.list_browser_entries():
|
| 203 |
+
for tn, tab in entry.tabs.items():
|
| 204 |
+
if sid in tab.sessions:
|
| 205 |
+
tab.sessions.discard(sid)
|
| 206 |
+
plugin_type = tn
|
| 207 |
+
break
|
| 208 |
+
if plugin_type:
|
| 209 |
+
plugin = PluginRegistry.get(plugin_type)
|
| 210 |
+
if plugin is not None:
|
| 211 |
+
plugin.drop_session(sid)
|
| 212 |
+
logger.info(
|
| 213 |
+
"[maintenance] evicted %d stale sessions, cache size=%d",
|
| 214 |
+
len(stale_ids),
|
| 215 |
+
len(self._session_cache),
|
| 216 |
+
)
|
| 217 |
+
await self._reconcile_tabs_locked()
|
| 218 |
+
closed = await self._browser_manager.collect_idle_browsers(
|
| 219 |
+
idle_seconds=self._tab_idle_seconds,
|
| 220 |
+
resident_browser_count=self._resident_browser_count,
|
| 221 |
+
)
|
| 222 |
+
self._apply_closed_tabs_locked(closed)
|
| 223 |
+
except Exception:
|
| 224 |
+
logger.exception("维护循环执行失败")
|
| 225 |
+
|
| 226 |
+
async def shutdown(self) -> None:
|
| 227 |
+
"""停止维护循环并关闭全部浏览器。"""
|
| 228 |
+
self._stop_event.set()
|
| 229 |
+
async with self._schedule_lock:
|
| 230 |
+
closed = await self._browser_manager.close_all()
|
| 231 |
+
self._apply_closed_tabs_locked(closed)
|
| 232 |
+
|
| 233 |
+
def report_account_unfreeze(
|
| 234 |
+
self,
|
| 235 |
+
fingerprint_id: str,
|
| 236 |
+
account_name: str,
|
| 237 |
+
unfreeze_at: int,
|
| 238 |
+
) -> None:
|
| 239 |
+
"""记录账号解冻时间,并同步更新内存账号池。"""
|
| 240 |
+
if self._config_repo is None:
|
| 241 |
+
return
|
| 242 |
+
self._config_repo.update_account_unfreeze_at(
|
| 243 |
+
fingerprint_id, account_name, unfreeze_at
|
| 244 |
+
)
|
| 245 |
+
self._pool.update_account_unfreeze_at(
|
| 246 |
+
fingerprint_id,
|
| 247 |
+
account_name,
|
| 248 |
+
unfreeze_at,
|
| 249 |
+
)
|
| 250 |
+
|
| 251 |
+
def get_account_runtime_status(self) -> dict[str, dict[str, Any]]:
|
| 252 |
+
"""返回当前账号运行时状态,供配置页展示角标。"""
|
| 253 |
+
status: dict[str, dict[str, Any]] = {}
|
| 254 |
+
for proxy_key, entry in self._browser_manager.list_browser_entries():
|
| 255 |
+
for type_name, tab in entry.tabs.items():
|
| 256 |
+
status[tab.account_id] = {
|
| 257 |
+
"fingerprint_id": proxy_key.fingerprint_id,
|
| 258 |
+
"type": type_name,
|
| 259 |
+
"is_active": True,
|
| 260 |
+
"tab_state": tab.state,
|
| 261 |
+
"accepting_new": tab.accepting_new,
|
| 262 |
+
"active_requests": tab.active_requests,
|
| 263 |
+
"frozen_until": tab.frozen_until,
|
| 264 |
+
}
|
| 265 |
+
return status
|
| 266 |
+
|
| 267 |
+
def _make_apply_auth_fn(
|
| 268 |
+
self,
|
| 269 |
+
plugin: Any,
|
| 270 |
+
account: AccountConfig,
|
| 271 |
+
) -> Any:
|
| 272 |
+
async def _apply_auth(context: BrowserContext, page: Page) -> None:
|
| 273 |
+
await plugin.apply_auth(context, page, account.auth)
|
| 274 |
+
|
| 275 |
+
return _apply_auth
|
| 276 |
+
|
| 277 |
+
def _apply_closed_tabs_locked(self, closed_tabs: list[ClosedTabInfo]) -> None:
|
| 278 |
+
for info in closed_tabs:
|
| 279 |
+
self._session_cache.delete_many(info.session_ids)
|
| 280 |
+
plugin = PluginRegistry.get(info.type_name)
|
| 281 |
+
if plugin is not None:
|
| 282 |
+
plugin.drop_sessions(info.session_ids)
|
| 283 |
+
|
| 284 |
+
def _stream_proxy_settings(
|
| 285 |
+
self,
|
| 286 |
+
target: _RequestTarget,
|
| 287 |
+
) -> tuple[str | None, tuple[str, str] | None, LocalProxyForwarder | None]:
|
| 288 |
+
if not target.proxy_key.use_proxy:
|
| 289 |
+
return (None, None, None)
|
| 290 |
+
upstream_host, upstream_port = parse_proxy_server(target.proxy_key.proxy_host)
|
| 291 |
+
forwarder = LocalProxyForwarder(
|
| 292 |
+
UpstreamProxy(
|
| 293 |
+
host=upstream_host,
|
| 294 |
+
port=upstream_port,
|
| 295 |
+
username=target.proxy_key.proxy_user,
|
| 296 |
+
password=target.group.proxy_pass,
|
| 297 |
+
),
|
| 298 |
+
listen_host="127.0.0.1",
|
| 299 |
+
listen_port=0,
|
| 300 |
+
on_log=lambda msg: logger.debug("[stream-proxy] %s", msg),
|
| 301 |
+
)
|
| 302 |
+
forwarder.start()
|
| 303 |
+
return (
|
| 304 |
+
forwarder.proxy_url,
|
| 305 |
+
None,
|
| 306 |
+
forwarder,
|
| 307 |
+
)
|
| 308 |
+
|
| 309 |
+
async def _clear_tab_domain_cookies_if_supported(
|
| 310 |
+
self, proxy_key: ProxyKey, type_name: str
|
| 311 |
+
) -> None:
|
| 312 |
+
"""关 tab 前清该 type 对应域名的 cookie(仅支持带 site.cookie_domain 的插件)。"""
|
| 313 |
+
entry = self._browser_manager.get_browser_entry(proxy_key)
|
| 314 |
+
if entry is None:
|
| 315 |
+
return
|
| 316 |
+
plugin = PluginRegistry.get(type_name)
|
| 317 |
+
if not isinstance(plugin, BaseSitePlugin) or not getattr(plugin, "site", None):
|
| 318 |
+
return
|
| 319 |
+
try:
|
| 320 |
+
await clear_cookies_for_domain(entry.context, plugin.site.cookie_domain)
|
| 321 |
+
except Exception as e:
|
| 322 |
+
logger.debug("关 tab 前清 cookie 失败 type=%s: %s", type_name, e)
|
| 323 |
+
|
    async def _prune_invalid_resources_locked(self) -> None:
        """Close browsers/tabs whose config entries no longer exist, so a hot
        config reload does not keep serving from stale resources."""
        for proxy_key, entry in list(self._browser_manager.list_browser_entries()):
            group = self._pool.get_group_by_proxy_key(proxy_key)
            if group is None:
                # The whole proxy group vanished from config: close its browser
                # and purge the sessions of every closed tab.
                self._apply_closed_tabs_locked(
                    await self._browser_manager.close_browser(proxy_key)
                )
                continue
            for type_name in list(entry.tabs.keys()):
                tab = entry.tabs[type_name]
                pair = self._pool.get_account_by_id(tab.account_id)
                # A tab is invalid when its account is gone, belongs to a
                # different group object, changed type, or was disabled.
                if (
                    pair is None
                    or pair[0] is not group
                    or pair[1].type != type_name
                    or not pair[1].enabled
                ):
                    self._invalidate_tab_sessions_locked(proxy_key, type_name)
                    if tab.active_requests == 0:
                        # Same policy as reconcile: prefer re-authing another
                        # account of the same group on the same page; only close
                        # the tab if switching fails or no account is available.
                        switched = False
                        group = self._pool.get_group_by_proxy_key(proxy_key)
                        if group is not None:
                            next_account = self._pool.next_available_account_in_group(
                                group,
                                type_name,
                                exclude_account_ids={tab.account_id},
                            )
                            if next_account is not None:
                                plugin = PluginRegistry.get(type_name)
                                if plugin is not None:
                                    switched = (
                                        await self._browser_manager.switch_tab_account(
                                            proxy_key,
                                            type_name,
                                            self._pool.account_id(group, next_account),
                                            self._make_apply_auth_fn(
                                                plugin,
                                                next_account,
                                            ),
                                        )
                                    )
                        if not switched:
                            # Wipe the plugin's domain cookies before closing.
                            await self._clear_tab_domain_cookies_if_supported(
                                proxy_key, type_name
                            )
                            closed = await self._browser_manager.close_tab(
                                proxy_key, type_name
                            )
                            if closed is not None:
                                self._apply_closed_tabs_locked([closed])
                    else:
                        # Requests still in flight: stop accepting new ones and
                        # let a later reconcile pass finish the cleanup.
                        self._browser_manager.mark_tab_draining(proxy_key, type_name)
| 378 |
+
|
| 379 |
+
def _invalidate_session_locked(
|
| 380 |
+
self,
|
| 381 |
+
session_id: str,
|
| 382 |
+
entry: SessionEntry | None = None,
|
| 383 |
+
) -> None:
|
| 384 |
+
entry = entry or self._session_cache.get(session_id)
|
| 385 |
+
if entry is None:
|
| 386 |
+
return
|
| 387 |
+
self._session_cache.delete(session_id)
|
| 388 |
+
self._conv_index.remove_session(session_id)
|
| 389 |
+
self._browser_manager.unregister_session(
|
| 390 |
+
entry.proxy_key,
|
| 391 |
+
entry.type_name,
|
| 392 |
+
session_id,
|
| 393 |
+
)
|
| 394 |
+
plugin = PluginRegistry.get(entry.type_name)
|
| 395 |
+
if plugin is not None:
|
| 396 |
+
plugin.drop_session(session_id)
|
| 397 |
+
|
| 398 |
+
def _invalidate_tab_sessions_locked(
|
| 399 |
+
self,
|
| 400 |
+
proxy_key: ProxyKey,
|
| 401 |
+
type_name: str,
|
| 402 |
+
) -> None:
|
| 403 |
+
tab = self._browser_manager.get_tab(proxy_key, type_name)
|
| 404 |
+
if tab is None or not tab.sessions:
|
| 405 |
+
return
|
| 406 |
+
session_ids = list(tab.sessions)
|
| 407 |
+
self._session_cache.delete_many(session_ids)
|
| 408 |
+
plugin = PluginRegistry.get(type_name)
|
| 409 |
+
if plugin is not None:
|
| 410 |
+
plugin.drop_sessions(session_ids)
|
| 411 |
+
tab.sessions.clear()
|
| 412 |
+
|
    async def _recover_browser_resource_invalid_locked(
        self,
        type_name: str,
        target: _RequestTarget,
        request_id: str,
        active_session_id: str | None,
        error: BrowserResourceInvalidError,
        attempt: int,
        max_retries: int,
    ) -> None:
        """Recover from an invalid browser resource, escalating by severity.

        Escalation ladder:
        - ``transport`` hint: invalidate the session only, keep tab and browser;
        - ``browser`` hint (or a legacy page_fetch timeout): restart the whole
          browser for this proxy key;
        - anything else: drop the tab's sessions and close just that tab.
        Caller must hold the schedule lock.
        """
        account_id = self._pool.account_id(target.group, target.account)
        diagnostics = self._browser_manager.browser_diagnostics(target.proxy_key)
        logger.warning(
            "[chat] browser resource invalid attempt=%s/%s type=%s proxy=%s account=%s session_id=%s request_id=%s resource=%s helper=%s stage=%s stream_phase=%s browser_present=%s proc_alive=%s cdp_listening=%s tab_count=%s active_requests=%s err=%s",
            attempt + 1,
            max_retries,
            type_name,
            target.proxy_key.fingerprint_id,
            account_id,
            active_session_id,
            request_id,
            error.resource_hint,
            error.helper_name,
            error.stage,
            error.stream_phase,
            diagnostics.get("browser_present"),
            diagnostics.get("proc_alive"),
            diagnostics.get("cdp_listening"),
            diagnostics.get("tab_count"),
            diagnostics.get("active_requests"),
            error,
        )
        # Surface the browser process stderr tail, if any, for post-mortems.
        stderr_tail = str(diagnostics.get("stderr_tail") or "").strip()
        if stderr_tail:
            logger.warning(
                "[chat] browser resource invalid stderr tail proxy=%s request_id=%s:\n%s",
                target.proxy_key.fingerprint_id,
                request_id,
                stderr_tail,
            )

        if active_session_id is not None:
            self._invalidate_session_locked(active_session_id)
        if error.resource_hint == "transport":
            # Transport-level failure: the tab/browser are presumed healthy,
            # so stop after dropping the session and let the caller retry.
            logger.warning(
                "[chat] transport-level stream failure, keep tab/browser and retry proxy=%s request_id=%s",
                target.proxy_key.fingerprint_id,
                request_id,
            )
            return
        self._browser_manager.mark_tab_draining(target.proxy_key, type_name)

        browser_restart_reason: str | None = None
        if error.resource_hint == "browser":
            browser_restart_reason = "resource_hint"
        # Legacy: page_fetch transport is no longer used by Claude (context_request since v0.x).
        # Kept for potential future plugins that still use page_fetch transport.
        elif (
            error.helper_name == "stream_raw_via_page_fetch"
            and error.stage in {"read_timeout", "evaluate_timeout"}
        ):
            browser_restart_reason = f"{error.helper_name}:{error.stage}"

        if browser_restart_reason is not None:
            # Hard escalation: restart the whole browser for this proxy key.
            logger.warning(
                "[chat] escalating browser recovery to full restart proxy=%s request_id=%s reason=%s",
                target.proxy_key.fingerprint_id,
                request_id,
                browser_restart_reason,
            )
            closed = await self._browser_manager.close_browser(target.proxy_key)
            self._apply_closed_tabs_locked(closed)
            return

        # Default: drop the tab's sessions and close only that tab.
        self._invalidate_tab_sessions_locked(target.proxy_key, type_name)
        closed = await self._browser_manager.close_tab(target.proxy_key, type_name)
        if closed is not None:
            self._apply_closed_tabs_locked([closed])
| 491 |
+
|
| 492 |
+
def _revive_tab_if_possible_locked(
|
| 493 |
+
self,
|
| 494 |
+
proxy_key: ProxyKey,
|
| 495 |
+
type_name: str,
|
| 496 |
+
) -> bool:
|
| 497 |
+
tab = self._browser_manager.get_tab(proxy_key, type_name)
|
| 498 |
+
if tab is None or tab.active_requests != 0:
|
| 499 |
+
return False
|
| 500 |
+
if tab.accepting_new:
|
| 501 |
+
return True
|
| 502 |
+
|
| 503 |
+
pair = self._pool.get_account_by_id(tab.account_id)
|
| 504 |
+
if pair is None:
|
| 505 |
+
return False
|
| 506 |
+
_, account = pair
|
| 507 |
+
if not account.is_available():
|
| 508 |
+
return False
|
| 509 |
+
tab.accepting_new = True
|
| 510 |
+
tab.state = "ready"
|
| 511 |
+
tab.frozen_until = None
|
| 512 |
+
tab.last_used_at = time.time()
|
| 513 |
+
return True
|
| 514 |
+
|
    async def _reconcile_tabs_locked(self) -> None:
        """
        Finalize every non-ready tab:

        - if the original account has become available again, revive the tab
        - otherwise, if the group has another available account, switch the
          tab to it once drained
        - otherwise close the tab
        """
        for proxy_key, entry in list(self._browser_manager.list_browser_entries()):
            for type_name in list(entry.tabs.keys()):
                tab = entry.tabs[type_name]
                if tab.accepting_new:
                    # Already serving; nothing to reconcile.
                    continue
                if tab.active_requests != 0:
                    # Still draining; revisit on a later maintenance pass.
                    continue
                if self._revive_tab_if_possible_locked(proxy_key, type_name):
                    # Original account recovered — tab restored in place.
                    continue

                group = self._pool.get_group_by_proxy_key(proxy_key)
                if group is None:
                    # Group removed from config: cookie wipe, then close.
                    await self._clear_tab_domain_cookies_if_supported(
                        proxy_key, type_name
                    )
                    closed = await self._browser_manager.close_tab(proxy_key, type_name)
                    if closed is not None:
                        self._apply_closed_tabs_locked([closed])
                    continue

                next_account = self._pool.next_available_account_in_group(
                    group,
                    type_name,
                    exclude_account_ids={tab.account_id},
                )
                if next_account is not None:
                    plugin = PluginRegistry.get(type_name)
                    if plugin is None:
                        continue
                    # Drop the old account's sessions before re-authing in place.
                    self._invalidate_tab_sessions_locked(proxy_key, type_name)
                    switched = await self._browser_manager.switch_tab_account(
                        proxy_key,
                        type_name,
                        self._pool.account_id(group, next_account),
                        self._make_apply_auth_fn(plugin, next_account),
                    )
                    if switched:
                        continue

                # No sibling account available (or the switch failed): close.
                await self._clear_tab_domain_cookies_if_supported(proxy_key, type_name)
                closed = await self._browser_manager.close_tab(proxy_key, type_name)
                if closed is not None:
                    self._apply_closed_tabs_locked([closed])
| 566 |
+
|
    async def _reuse_session_target_locked(
        self,
        plugin: Any,
        type_name: str,
        session_id: str,
    ) -> _RequestTarget | None:
        """Try to route a request onto the tab that already owns ``session_id``.

        Returns a ready _RequestTarget (tab slot acquired, session marked busy)
        or None when the cached session is unusable — in which case the stale
        state is invalidated so the caller can allocate a fresh target.

        Raises RuntimeError when the session exists but is temporarily busy.
        Caller must hold the schedule lock.
        """
        entry = self._session_cache.get(session_id)
        if entry is None or entry.type_name != type_name:
            return None

        pair = self._pool.get_account_by_id(entry.account_id)
        if pair is None:
            # Owning account no longer exists in the pool.
            self._invalidate_session_locked(session_id, entry)
            return None
        group, account = pair

        tab = self._browser_manager.get_tab(entry.proxy_key, type_name)
        if (
            tab is None
            or tab.account_id != entry.account_id
            or not plugin.has_session(session_id)
        ):
            # Tab is gone, re-authed to another account, or the plugin no
            # longer tracks this session.
            self._invalidate_session_locked(session_id, entry)
            return None

        if not tab.accepting_new:
            self._invalidate_session_locked(session_id, entry)
            return None
        if session_id in self._busy_sessions:
            raise RuntimeError("当前会话正在处理中,请稍后再试")
        if tab.active_requests >= self._tab_max_concurrent:
            raise RuntimeError("当前会话所在 tab 繁忙,请稍后再试")

        page = self._browser_manager.acquire_tab(
            entry.proxy_key,
            type_name,
            self._tab_max_concurrent,
        )
        if page is None:
            raise RuntimeError("当前会话暂不可复用,请稍后再试")

        # Slot acquired: refresh the cache entry and mark the session busy.
        self._session_cache.touch(session_id)
        self._busy_sessions.add(session_id)
        context = await self._browser_manager.ensure_browser(
            entry.proxy_key,
            group.proxy_pass,
        )
        return _RequestTarget(
            proxy_key=entry.proxy_key,
            group=group,
            account=account,
            context=context,
            page=page,
            session_id=session_id,
            full_history=False,  # reuse: only the incremental turn is sent
        )
| 623 |
+
|
    async def _allocate_new_target_locked(
        self,
        type_name: str,
    ) -> _RequestTarget:
        """Allocate a fresh request target for ``type_name`` (no session reuse).

        Tries, in order: (1) an existing serving tab of this type, (2) a new
        tab inside an already-open browser, (3) switching a drained tab of
        this type to a sibling account, (4) opening a brand-new browser.
        Raises ValueError when no plugin/account is available, RuntimeError
        when a request slot cannot be acquired. Caller holds the schedule lock.
        """
        # 1. An already-open browser has a serving tab for this type: reuse it.
        existing_tabs: list[tuple[int, float, ProxyKey, TabRuntime]] = []
        for proxy_key, entry in self._browser_manager.list_browser_entries():
            tab = entry.tabs.get(type_name)
            if (
                tab is not None
                and tab.accepting_new
                and tab.active_requests < self._tab_max_concurrent
            ):
                existing_tabs.append(
                    (tab.active_requests, tab.last_used_at, proxy_key, tab)
                )
        if existing_tabs:
            # Least-loaded, then least-recently-used tab wins.
            _, _, proxy_key, tab = min(existing_tabs, key=lambda item: item[:2])
            pair = self._pool.get_account_by_id(tab.account_id)
            if pair is None:
                # Tab's account vanished — clean it up and fall through.
                self._invalidate_tab_sessions_locked(proxy_key, type_name)
                closed = await self._browser_manager.close_tab(proxy_key, type_name)
                if closed is not None:
                    self._apply_closed_tabs_locked([closed])
            else:
                group, account = pair
                page = self._browser_manager.acquire_tab(
                    proxy_key,
                    type_name,
                    self._tab_max_concurrent,
                )
                if page is not None:
                    context = await self._browser_manager.ensure_browser(
                        proxy_key,
                        group.proxy_pass,
                    )
                    return _RequestTarget(
                        proxy_key=proxy_key,
                        group=group,
                        account=account,
                        context=context,
                        page=page,
                        session_id=None,
                        full_history=True,
                    )

        # 2. An open browser has no tab for this type, but its group has an
        #    available account: open a new tab there.
        open_browser_candidates: list[
            tuple[int, float, ProxyKey, ProxyGroupConfig]
        ] = []
        for proxy_key, entry in self._browser_manager.list_browser_entries():
            if type_name in entry.tabs:
                continue
            group = self._pool.get_group_by_proxy_key(proxy_key)
            if group is None:
                continue
            if not self._pool.has_available_account_in_group(group, type_name):
                continue
            open_browser_candidates.append(
                (
                    self._browser_manager.browser_load(proxy_key),
                    entry.last_used_at,
                    proxy_key,
                    group,
                )
            )
        if open_browser_candidates:
            # Lowest browser load, then least-recently-used browser wins.
            _, _, proxy_key, group = min(
                open_browser_candidates, key=lambda item: item[:2]
            )
            account = self._pool.next_available_account_in_group(group, type_name)
            if account is not None:
                plugin = PluginRegistry.get(type_name)
                if plugin is None:
                    raise ValueError(f"未注册的 type: {type_name}")
                await self._browser_manager.open_tab(
                    proxy_key,
                    group.proxy_pass,
                    type_name,
                    self._pool.account_id(group, account),
                    plugin.create_page,
                    self._make_apply_auth_fn(plugin, account),
                )
                page = self._browser_manager.acquire_tab(
                    proxy_key,
                    type_name,
                    self._tab_max_concurrent,
                )
                if page is None:
                    raise RuntimeError("新建 tab 后仍无法占用请求槽位")
                context = await self._browser_manager.ensure_browser(
                    proxy_key,
                    group.proxy_pass,
                )
                return _RequestTarget(
                    proxy_key=proxy_key,
                    group=group,
                    account=account,
                    context=context,
                    page=page,
                    session_id=None,
                    full_history=True,
                )

        # 3. A drained tab of this type exists and its group has a spare
        #    account: switch the account on the current tab.
        switch_candidates: list[tuple[float, ProxyKey, ProxyGroupConfig]] = []
        for proxy_key, entry in self._browser_manager.list_browser_entries():
            tab = entry.tabs.get(type_name)
            if tab is None or tab.active_requests != 0:
                continue
            group = self._pool.get_group_by_proxy_key(proxy_key)
            if group is None:
                continue
            if not self._pool.has_available_account_in_group(
                group,
                type_name,
                exclude_account_ids={tab.account_id},
            ):
                continue
            switch_candidates.append((tab.last_used_at, proxy_key, group))
        if switch_candidates:
            # Least-recently-used drained tab wins.
            _, proxy_key, group = min(switch_candidates, key=lambda item: item[0])
            tab = self._browser_manager.get_tab(proxy_key, type_name)
            plugin = PluginRegistry.get(type_name)
            if tab is not None and plugin is not None:
                next_account = self._pool.next_available_account_in_group(
                    group,
                    type_name,
                    exclude_account_ids={tab.account_id},
                )
                if next_account is not None:
                    # Old account's sessions are unusable after the switch.
                    self._invalidate_tab_sessions_locked(proxy_key, type_name)
                    switched = await self._browser_manager.switch_tab_account(
                        proxy_key,
                        type_name,
                        self._pool.account_id(group, next_account),
                        self._make_apply_auth_fn(plugin, next_account),
                    )
                    if switched:
                        page = self._browser_manager.acquire_tab(
                            proxy_key,
                            type_name,
                            self._tab_max_concurrent,
                        )
                        if page is None:
                            raise RuntimeError("切号后仍无法占用请求槽位")
                        context = await self._browser_manager.ensure_browser(
                            proxy_key,
                            group.proxy_pass,
                        )
                        return _RequestTarget(
                            proxy_key=proxy_key,
                            group=group,
                            account=next_account,
                            context=context,
                            page=page,
                            session_id=None,
                            full_history=True,
                        )

        # 4. Open a brand-new browser for a group with no open browser yet.
        open_groups = {
            proxy_key.fingerprint_id
            for proxy_key in self._browser_manager.current_proxy_keys()
        }
        pair = self._pool.next_available_pair(
            type_name,
            exclude_fingerprint_ids=open_groups,
        )
        if pair is None:
            raise ValueError(f"没有类别为 {type_name!r} 的可用账号,请稍后再试")
        group, account = pair
        proxy_key = _proxy_key_for_group(group)
        plugin = PluginRegistry.get(type_name)
        if plugin is None:
            raise ValueError(f"未注册的 type: {type_name}")
        await self._browser_manager.open_tab(
            proxy_key,
            group.proxy_pass,
            type_name,
            self._pool.account_id(group, account),
            plugin.create_page,
            self._make_apply_auth_fn(plugin, account),
        )
        page = self._browser_manager.acquire_tab(
            proxy_key,
            type_name,
            self._tab_max_concurrent,
        )
        if page is None:
            raise RuntimeError("新浏览器建 tab 后仍无法占用请求槽位")
        context = await self._browser_manager.ensure_browser(
            proxy_key, group.proxy_pass
        )
        return _RequestTarget(
            proxy_key=proxy_key,
            group=group,
            account=account,
            context=context,
            page=page,
            session_id=None,
            full_history=True,
        )
| 827 |
+
|
    async def _stream_completion(
        self,
        type_name: str,
        req: OpenAIChatRequest,
    ) -> AsyncIterator[str]:
        """
        Internal implementation: scheduling + the plugin's stream_completion
        string stream, with a zero-width-encoded session_id appended at the end.
        Exposed externally only through stream_openai_events().
        """
        plugin = PluginRegistry.get(type_name)
        if plugin is None:
            raise ValueError(f"未注册的 type: {type_name}")

        raw_messages = _request_messages_as_dicts(req)
        conv_uuid = req.resume_session_id or parse_conv_uuid_from_messages(raw_messages)

        # Fingerprint matching: when the client doesn't preserve the zero-width
        # session marker (conv_uuid is None), compute a fingerprint from
        # system prompt + first user message and look up the matching session.
        # This replaces sticky session and prevents context pollution.
        fingerprint = ""
        if not conv_uuid:
            fingerprint = compute_conversation_fingerprint(req.messages)
            if fingerprint:
                entry = self._conv_index.lookup(fingerprint)
                if entry is not None:
                    conv_uuid = entry.session_id

        logger.info("[chat] type=%s parsed conv_uuid=%s fingerprint=%s", type_name, conv_uuid, fingerprint or "n/a")

        has_tools = bool(req.tools)
        react_prompt_prefix = format_react_prompt(req.tools or []) if has_tools else ""

        # Debug snapshot of the final prompt; written on every request.
        debug_path = (
            Path(__file__).resolve().parent.parent.parent
            / "debug"
            / "chat_prompt_debug.json"
        )

        max_retries = 3
        for attempt in range(max_retries):
            target: _RequestTarget | None = None
            active_session_id: str | None = None
            request_id = uuid.uuid4().hex
            try:
                # Scheduling: reuse the session's tab when possible, otherwise
                # allocate a fresh target. Both paths acquire a tab slot.
                async with self._schedule_lock:
                    if conv_uuid:
                        target = await self._reuse_session_target_locked(
                            plugin,
                            type_name,
                            conv_uuid,
                        )
                    if target is None:
                        target = await self._allocate_new_target_locked(type_name)
                    if target.session_id is not None:
                        active_session_id = target.session_id

                content = extract_user_content(
                    req.messages,
                    has_tools=has_tools,
                    react_prompt_prefix=react_prompt_prefix,
                    full_history=target.full_history,
                )
                # Image-only requests get a placeholder prompt.
                if not content.strip() and req.attachment_files:
                    content = "Please analyze the attached image."
                if not content.strip():
                    raise ValueError("messages 中需至少有一条带 content 的 user 消息")

                debug_path.parent.mkdir(parents=True, exist_ok=True)
                debug_path.write_text(
                    json.dumps(
                        {
                            "prompt": content,
                            "full_history": target.full_history,
                            "type": type_name,
                        },
                        ensure_ascii=False,
                        indent=2,
                    ),
                    encoding="utf-8",
                )

                account_id = self._pool.account_id(target.group, target.account)
                session_id = target.session_id
                if session_id is None:
                    # No reusable session: create a fresh conversation first.
                    await plugin.ensure_request_ready(
                        target.context,
                        target.page,
                        request_id=request_id,
                        session_id=None,
                        phase="create_conversation",
                        account_id=account_id,
                    )
                    logger.info(
                        "[chat] create_conversation type=%s proxy=%s account=%s",
                        type_name,
                        target.proxy_key.fingerprint_id,
                        account_id,
                    )
                    session_id = await plugin.create_conversation(
                        target.context,
                        target.page,
                        timezone=target.group.timezone
                        or getattr(target.proxy_key, "timezone", None)
                        or TIMEZONE,
                        public_model=str(getattr(req, "model", "") or ""),
                        upstream_model=str(getattr(req, "upstream_model", "") or ""),
                        request_id=request_id,
                    )
                    if not session_id:
                        raise RuntimeError("插件创建会话失败")
                    # Register the new session under the schedule lock.
                    async with self._schedule_lock:
                        self._session_cache.put(
                            session_id,
                            target.proxy_key,
                            type_name,
                            account_id,
                        )
                        self._browser_manager.register_session(
                            target.proxy_key,
                            type_name,
                            session_id,
                        )
                        self._busy_sessions.add(session_id)
                        # Register fingerprint for future matching
                        if fingerprint:
                            self._conv_index.register(
                                fingerprint,
                                session_id,
                                len(req.messages),
                                account_id,
                            )
                    active_session_id = session_id

                # Skip pre-stream probe for newly created sessions:
                # create_conversation already validated page health.
                if target.session_id is not None:
                    await plugin.ensure_request_ready(
                        target.context,
                        target.page,
                        request_id=request_id,
                        session_id=session_id,
                        phase="stream_completion",
                        account_id=account_id,
                    )
                logger.info(
                    "[chat] stream_completion type=%s session_id=%s proxy=%s account=%s full_history=%s",
                    type_name,
                    session_id,
                    target.proxy_key.fingerprint_id,
                    account_id,
                    target.full_history,
                )
                # Pick the attachment source depending on full_history:
                # - reused session (full_history=False): only the last user
                #   message's images (may be empty -> no images this turn)
                # - new/rebuilt session (full_history=True): images from all
                #   historical user messages
                attachments = (
                    req.attachment_files_all_users
                    if target.full_history
                    else req.attachment_files_last_user
                )

                proxy_url = None
                proxy_auth = None
                proxy_forwarder = None
                if plugin.stream_transport() == "context_request":
                    # context_request transport streams outside the browser, so
                    # it needs the local proxy forwarder when a proxy is used.
                    proxy_url, proxy_auth, proxy_forwarder = self._stream_proxy_settings(target)
                    target.proxy_url = proxy_url
                    target.proxy_auth = proxy_auth
                    target.proxy_forwarder = proxy_forwarder
                try:
                    stream = cast(
                        AsyncIterator[str],
                        plugin.stream_completion(
                            target.context,
                            target.page,
                            session_id,
                            content,
                            request_id=request_id,
                            attachments=attachments,
                            proxy_url=proxy_url,
                            proxy_auth=proxy_auth,
                        ),
                    )
                    async for chunk in stream:
                        yield chunk
                finally:
                    # Always stop the forwarder, even on stream failure.
                    if proxy_forwarder is not None:
                        try:
                            proxy_forwarder.stop()
                        except Exception:
                            pass
                        target.proxy_forwarder = None

                # Append the zero-width session marker so the client can resume.
                yield session_id_suffix(session_id)
                return
            except AccountFrozenError as e:
                # Plugin reported rate-limit/quota exhaustion: freeze the
                # account, drain its tab, and retry with another resource.
                logger.warning(
                    "账号限流/额度用尽(插件上报),切换资源重试: type=%s proxy=%s err=%s",
                    type_name,
                    target.proxy_key.fingerprint_id if target else None,
                    e,
                )
                async with self._schedule_lock:
                    if target is not None:
                        self.report_account_unfreeze(
                            target.group.fingerprint_id,
                            target.account.name,
                            e.unfreeze_at,
                        )
                        self._browser_manager.mark_tab_draining(
                            target.proxy_key,
                            type_name,
                            frozen_until=e.unfreeze_at,
                        )
                        self._invalidate_tab_sessions_locked(
                            target.proxy_key, type_name
                        )
                if attempt == max_retries - 1:
                    raise RuntimeError(
                        f"已重试 {max_retries} 次仍限流/过载,请稍后再试: {e}"
                    ) from e
                continue
            except BrowserResourceInvalidError as e:
                # Browser/tab/transport became unusable mid-request: recover
                # (possibly escalating to a full browser restart) and retry.
                proxy_for_log = (
                    target.proxy_key.fingerprint_id
                    if target is not None
                    else getattr(getattr(e, "proxy_key", None), "fingerprint_id", None)
                )
                logger.warning(
                    "[chat] browser resource invalid bubbled type=%s request_id=%s proxy=%s session_id=%s helper=%s stage=%s resource=%s err=%s",
                    type_name,
                    request_id,
                    proxy_for_log,
                    active_session_id,
                    e.helper_name,
                    e.stage,
                    e.resource_hint,
                    e,
                )
                async with self._schedule_lock:
                    if target is not None:
                        await self._recover_browser_resource_invalid_locked(
                            type_name,
                            target,
                            request_id,
                            active_session_id,
                            e,
                            attempt,
                            max_retries,
                        )
                    elif getattr(e, "proxy_key", None) is not None:
                        closed = await self._browser_manager.close_browser(e.proxy_key)
                        self._apply_closed_tabs_locked(closed)
                if attempt == max_retries - 1:
                    raise RuntimeError(
                        f"浏览器资源已失效,重试 {max_retries} 次后仍失败: {e}"
                    ) from e
                continue
            finally:
                # Release the tab slot and the busy flag for every attempt.
                if target is not None:
                    async with self._schedule_lock:
                        if active_session_id is not None:
                            self._busy_sessions.discard(active_session_id)
                        self._browser_manager.release_tab(target.proxy_key, type_name)
| 1093 |
+
|
| 1094 |
+
async def stream_openai_events(
|
| 1095 |
+
self,
|
| 1096 |
+
type_name: str,
|
| 1097 |
+
req: OpenAIChatRequest,
|
| 1098 |
+
) -> AsyncIterator[OpenAIStreamEvent]:
|
| 1099 |
+
"""
|
| 1100 |
+
唯一流式出口:以 OpenAIStreamEvent 为中间态。插件产出字符串流,
|
| 1101 |
+
在此包装为 content_delta + finish,供协议适配层编码为各协议 SSE。
|
| 1102 |
+
"""
|
| 1103 |
+
async for chunk in self._stream_completion(type_name, req):
|
| 1104 |
+
# session marker 也作为 content_delta 透传(对事件消费者而言是普通文本片段)
|
| 1105 |
+
yield OpenAIStreamEvent(type="content_delta", content=chunk)
|
| 1106 |
+
yield OpenAIStreamEvent(type="finish", finish_reason="stop")
|
core/api/config_routes.py
ADDED
|
@@ -0,0 +1,329 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Config routes: GET/PUT /api/config and the /config dashboard entrypoint.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import logging
|
| 6 |
+
import time
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
from typing import Any
|
| 9 |
+
|
| 10 |
+
from fastapi import APIRouter, Depends, HTTPException, Request, Response
|
| 11 |
+
from fastapi.responses import FileResponse, JSONResponse, RedirectResponse
|
| 12 |
+
from pydantic import BaseModel
|
| 13 |
+
|
| 14 |
+
from core.api.auth import (
|
| 15 |
+
ADMIN_SESSION_COOKIE,
|
| 16 |
+
admin_logged_in,
|
| 17 |
+
check_admin_login_rate_limit,
|
| 18 |
+
configured_config_secret_hash,
|
| 19 |
+
get_effective_auth_settings,
|
| 20 |
+
hash_config_secret,
|
| 21 |
+
normalize_api_key_text,
|
| 22 |
+
record_admin_login_failure,
|
| 23 |
+
record_admin_login_success,
|
| 24 |
+
refresh_runtime_auth_settings,
|
| 25 |
+
require_config_login,
|
| 26 |
+
require_config_login_enabled,
|
| 27 |
+
verify_config_secret,
|
| 28 |
+
)
|
| 29 |
+
from core.api.chat_handler import ChatHandler
|
| 30 |
+
from core.config.repository import (
|
| 31 |
+
APP_SETTING_AUTH_API_KEY,
|
| 32 |
+
APP_SETTING_AUTH_CONFIG_SECRET_HASH,
|
| 33 |
+
APP_SETTING_ENABLE_PRO_MODELS,
|
| 34 |
+
ConfigRepository,
|
| 35 |
+
)
|
| 36 |
+
from core.plugin.base import PluginRegistry
|
| 37 |
+
|
| 38 |
+
logger = logging.getLogger(__name__)
|
| 39 |
+
|
| 40 |
+
STATIC_DIR = Path(__file__).resolve().parent.parent / "static"
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class AdminLoginRequest(BaseModel):
|
| 44 |
+
secret: str
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class AuthSettingsUpdateRequest(BaseModel):
|
| 48 |
+
api_key: str | None = None
|
| 49 |
+
admin_password: str | None = None
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class ProModelsUpdateRequest(BaseModel):
|
| 53 |
+
enabled: bool = False
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def _config_repo_of(request: Request) -> ConfigRepository:
|
| 57 |
+
repo: ConfigRepository | None = getattr(request.app.state, "config_repo", None)
|
| 58 |
+
if repo is None:
|
| 59 |
+
raise HTTPException(status_code=503, detail="Service is not ready")
|
| 60 |
+
return repo
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def _auth_settings_payload(request: Request) -> dict[str, Any]:
|
| 64 |
+
settings = get_effective_auth_settings(request)
|
| 65 |
+
return {
|
| 66 |
+
"api_key": settings.api_key_text,
|
| 67 |
+
"api_key_configured": bool(settings.api_keys),
|
| 68 |
+
"api_key_source": settings.api_key_source,
|
| 69 |
+
"api_key_env_managed": settings.api_key_env_managed,
|
| 70 |
+
"admin_password_configured": bool(settings.config_secret_hash),
|
| 71 |
+
"admin_password_source": settings.config_secret_source,
|
| 72 |
+
"admin_password_env_managed": settings.config_secret_env_managed,
|
| 73 |
+
}
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def create_config_router() -> APIRouter:
|
| 77 |
+
router = APIRouter()
|
| 78 |
+
|
| 79 |
+
@router.get("/api/types")
|
| 80 |
+
def get_types(_: None = Depends(require_config_login)) -> list[str]:
|
| 81 |
+
"""Return registered provider types for the config dashboard."""
|
| 82 |
+
return PluginRegistry.all_types()
|
| 83 |
+
|
| 84 |
+
@router.get("/api/config")
|
| 85 |
+
def get_config(
|
| 86 |
+
request: Request, _: None = Depends(require_config_login)
|
| 87 |
+
) -> list[dict[str, Any]]:
|
| 88 |
+
"""Return raw proxy-group and account configuration."""
|
| 89 |
+
return _config_repo_of(request).load_raw()
|
| 90 |
+
|
| 91 |
+
@router.get("/api/models/{provider}/metadata")
|
| 92 |
+
def get_public_model_metadata(provider: str) -> dict[str, Any]:
|
| 93 |
+
try:
|
| 94 |
+
return PluginRegistry.model_metadata(provider)
|
| 95 |
+
except ValueError as exc:
|
| 96 |
+
raise HTTPException(status_code=404, detail=str(exc)) from exc
|
| 97 |
+
|
| 98 |
+
@router.get("/api/config/models")
|
| 99 |
+
def get_model_metadata(_: None = Depends(require_config_login)) -> dict[str, Any]:
|
| 100 |
+
return PluginRegistry.model_metadata("claude")
|
| 101 |
+
|
| 102 |
+
@router.get("/api/config/auth-settings")
|
| 103 |
+
def get_auth_settings(
|
| 104 |
+
request: Request, _: None = Depends(require_config_login)
|
| 105 |
+
) -> dict[str, Any]:
|
| 106 |
+
return _auth_settings_payload(request)
|
| 107 |
+
|
| 108 |
+
@router.put("/api/config/auth-settings")
|
| 109 |
+
def put_auth_settings(
|
| 110 |
+
payload: AuthSettingsUpdateRequest,
|
| 111 |
+
request: Request,
|
| 112 |
+
_: None = Depends(require_config_login),
|
| 113 |
+
) -> dict[str, Any]:
|
| 114 |
+
repo = _config_repo_of(request)
|
| 115 |
+
if payload.api_key is not None:
|
| 116 |
+
repo.set_app_setting(
|
| 117 |
+
APP_SETTING_AUTH_API_KEY,
|
| 118 |
+
normalize_api_key_text(payload.api_key),
|
| 119 |
+
)
|
| 120 |
+
if payload.admin_password is not None:
|
| 121 |
+
password = payload.admin_password.strip()
|
| 122 |
+
repo.set_app_setting(
|
| 123 |
+
APP_SETTING_AUTH_CONFIG_SECRET_HASH,
|
| 124 |
+
hash_config_secret(password) if password else "",
|
| 125 |
+
)
|
| 126 |
+
refresh_runtime_auth_settings(request.app)
|
| 127 |
+
settings_payload = _auth_settings_payload(request)
|
| 128 |
+
if payload.admin_password is not None and payload.admin_password.strip():
|
| 129 |
+
store = getattr(request.app.state, "admin_sessions", None)
|
| 130 |
+
if store is not None:
|
| 131 |
+
token = (request.cookies.get(ADMIN_SESSION_COOKIE) or "").strip()
|
| 132 |
+
store.revoke(token)
|
| 133 |
+
return {"status": "ok", "settings": settings_payload}
|
| 134 |
+
|
| 135 |
+
@router.get("/api/config/pro-models")
|
| 136 |
+
def get_pro_models(
|
| 137 |
+
request: Request, _: None = Depends(require_config_login)
|
| 138 |
+
) -> dict[str, Any]:
|
| 139 |
+
repo = _config_repo_of(request)
|
| 140 |
+
enabled = repo.get_app_setting(APP_SETTING_ENABLE_PRO_MODELS) == "true"
|
| 141 |
+
return {"enabled": enabled}
|
| 142 |
+
|
| 143 |
+
@router.put("/api/config/pro-models")
|
| 144 |
+
def put_pro_models(
|
| 145 |
+
payload: ProModelsUpdateRequest,
|
| 146 |
+
request: Request,
|
| 147 |
+
_: None = Depends(require_config_login),
|
| 148 |
+
) -> dict[str, Any]:
|
| 149 |
+
repo = _config_repo_of(request)
|
| 150 |
+
repo.set_app_setting(
|
| 151 |
+
APP_SETTING_ENABLE_PRO_MODELS, "true" if payload.enabled else "false"
|
| 152 |
+
)
|
| 153 |
+
return {"status": "ok", "enabled": payload.enabled}
|
| 154 |
+
|
| 155 |
+
@router.get("/api/config/status")
|
| 156 |
+
def get_config_status(
|
| 157 |
+
request: Request, _: None = Depends(require_config_login)
|
| 158 |
+
) -> dict[str, Any]:
|
| 159 |
+
"""Return runtime account status for the config dashboard."""
|
| 160 |
+
repo = _config_repo_of(request)
|
| 161 |
+
handler: ChatHandler | None = getattr(request.app.state, "chat_handler", None)
|
| 162 |
+
if handler is None:
|
| 163 |
+
raise HTTPException(status_code=503, detail="Service is not ready")
|
| 164 |
+
runtime_status = handler.get_account_runtime_status()
|
| 165 |
+
now = int(time.time())
|
| 166 |
+
accounts: dict[str, dict[str, Any]] = {}
|
| 167 |
+
for group in repo.load_groups():
|
| 168 |
+
for account in group.accounts:
|
| 169 |
+
account_id = f"{group.fingerprint_id}:{account.name}"
|
| 170 |
+
runtime = runtime_status.get(account_id, {})
|
| 171 |
+
is_frozen = (
|
| 172 |
+
account.unfreeze_at is not None and int(account.unfreeze_at) > now
|
| 173 |
+
)
|
| 174 |
+
accounts[account_id] = {
|
| 175 |
+
"fingerprint_id": group.fingerprint_id,
|
| 176 |
+
"account_name": account.name,
|
| 177 |
+
"enabled": account.enabled,
|
| 178 |
+
"unfreeze_at": account.unfreeze_at,
|
| 179 |
+
"is_frozen": is_frozen,
|
| 180 |
+
"is_active": bool(runtime.get("is_active")),
|
| 181 |
+
"tab_state": runtime.get("tab_state"),
|
| 182 |
+
"accepting_new": runtime.get("accepting_new"),
|
| 183 |
+
"active_requests": runtime.get("active_requests", 0),
|
| 184 |
+
}
|
| 185 |
+
return {"now": now, "accounts": accounts}
|
| 186 |
+
|
| 187 |
+
@router.put("/api/config")
|
| 188 |
+
async def put_config(
|
| 189 |
+
request: Request,
|
| 190 |
+
config: list[dict[str, Any]],
|
| 191 |
+
_: None = Depends(require_config_login),
|
| 192 |
+
) -> dict[str, Any]:
|
| 193 |
+
"""Update configuration and apply it immediately."""
|
| 194 |
+
repo = _config_repo_of(request)
|
| 195 |
+
if not config:
|
| 196 |
+
raise HTTPException(status_code=400, detail="Configuration must not be empty")
|
| 197 |
+
for i, g in enumerate(config):
|
| 198 |
+
if not isinstance(g, dict):
|
| 199 |
+
raise HTTPException(
|
| 200 |
+
status_code=400,
|
| 201 |
+
detail=f"Item {i + 1} must be an object",
|
| 202 |
+
)
|
| 203 |
+
if "fingerprint_id" not in g:
|
| 204 |
+
raise HTTPException(
|
| 205 |
+
status_code=400,
|
| 206 |
+
detail=f"Proxy group {i + 1} is missing field: fingerprint_id",
|
| 207 |
+
)
|
| 208 |
+
use_proxy = g.get("use_proxy", True)
|
| 209 |
+
if isinstance(use_proxy, str):
|
| 210 |
+
use_proxy = use_proxy.strip().lower() not in {
|
| 211 |
+
"0",
|
| 212 |
+
"false",
|
| 213 |
+
"no",
|
| 214 |
+
"off",
|
| 215 |
+
}
|
| 216 |
+
else:
|
| 217 |
+
use_proxy = bool(use_proxy)
|
| 218 |
+
if use_proxy and not str(g.get("proxy_host", "")).strip():
|
| 219 |
+
raise HTTPException(
|
| 220 |
+
status_code=400,
|
| 221 |
+
detail=f"Proxy group {i + 1} has proxy enabled and requires proxy_host",
|
| 222 |
+
)
|
| 223 |
+
accounts = g.get("accounts", [])
|
| 224 |
+
if not accounts:
|
| 225 |
+
raise HTTPException(
|
| 226 |
+
status_code=400,
|
| 227 |
+
detail=f"Proxy group {i + 1} must include at least one account",
|
| 228 |
+
)
|
| 229 |
+
for j, a in enumerate(accounts):
|
| 230 |
+
if not isinstance(a, dict) or not (a.get("name") or "").strip():
|
| 231 |
+
raise HTTPException(
|
| 232 |
+
status_code=400,
|
| 233 |
+
detail=f"Account {j + 1} in proxy group {i + 1} must include name",
|
| 234 |
+
)
|
| 235 |
+
if not (a.get("type") or "").strip():
|
| 236 |
+
raise HTTPException(
|
| 237 |
+
status_code=400,
|
| 238 |
+
detail=f"Account {j + 1} in proxy group {i + 1} must include type (for example: claude)",
|
| 239 |
+
)
|
| 240 |
+
if "enabled" in a and not isinstance(
|
| 241 |
+
a.get("enabled"), (bool, int, str)
|
| 242 |
+
):
|
| 243 |
+
raise HTTPException(
|
| 244 |
+
status_code=400,
|
| 245 |
+
detail=f"Account {j + 1} in proxy group {i + 1} has an invalid enabled value",
|
| 246 |
+
)
|
| 247 |
+
try:
|
| 248 |
+
repo.save_raw(config)
|
| 249 |
+
except Exception as e:
|
| 250 |
+
logger.exception("Failed to save configuration")
|
| 251 |
+
raise HTTPException(status_code=400, detail=str(e)) from e
|
| 252 |
+
# Apply immediately: reload groups and refresh the active chat handler.
|
| 253 |
+
try:
|
| 254 |
+
groups = repo.load_groups()
|
| 255 |
+
handler: ChatHandler | None = getattr(
|
| 256 |
+
request.app.state, "chat_handler", None
|
| 257 |
+
)
|
| 258 |
+
if handler is None:
|
| 259 |
+
raise RuntimeError("chat_handler is not initialized")
|
| 260 |
+
await handler.refresh_configuration(groups, config_repo=repo)
|
| 261 |
+
except Exception as e:
|
| 262 |
+
logger.exception("Failed to reload account pool")
|
| 263 |
+
raise HTTPException(
|
| 264 |
+
status_code=500,
|
| 265 |
+
detail=f"Configuration was saved but reload failed: {e}",
|
| 266 |
+
) from e
|
| 267 |
+
return {"status": "ok", "message": "Configuration saved and applied"}
|
| 268 |
+
|
| 269 |
+
@router.get("/login", response_model=None)
|
| 270 |
+
def login_page(request: Request) -> FileResponse | RedirectResponse:
|
| 271 |
+
require_config_login_enabled(request)
|
| 272 |
+
if admin_logged_in(request):
|
| 273 |
+
return RedirectResponse(url="/config", status_code=302)
|
| 274 |
+
path = STATIC_DIR / "login.html"
|
| 275 |
+
if not path.is_file():
|
| 276 |
+
raise HTTPException(status_code=404, detail="Login page is not ready")
|
| 277 |
+
return FileResponse(path)
|
| 278 |
+
|
| 279 |
+
@router.post("/api/admin/login", response_model=None)
|
| 280 |
+
def admin_login(payload: AdminLoginRequest, request: Request) -> Response:
|
| 281 |
+
require_config_login_enabled(request)
|
| 282 |
+
check_admin_login_rate_limit(request)
|
| 283 |
+
secret = payload.secret.strip()
|
| 284 |
+
encoded = configured_config_secret_hash(_config_repo_of(request))
|
| 285 |
+
if not secret or not encoded or not verify_config_secret(secret, encoded):
|
| 286 |
+
lock_seconds = record_admin_login_failure(request)
|
| 287 |
+
if lock_seconds > 0:
|
| 288 |
+
raise HTTPException(
|
| 289 |
+
status_code=429,
|
| 290 |
+
detail=f"Too many failed login attempts. Try again in {lock_seconds} seconds.",
|
| 291 |
+
)
|
| 292 |
+
raise HTTPException(status_code=401, detail="Sign-in failed. Password is incorrect.")
|
| 293 |
+
record_admin_login_success(request)
|
| 294 |
+
store = request.app.state.admin_sessions
|
| 295 |
+
token = store.create()
|
| 296 |
+
response = JSONResponse({"status": "ok"})
|
| 297 |
+
response.set_cookie(
|
| 298 |
+
key=ADMIN_SESSION_COOKIE,
|
| 299 |
+
value=token,
|
| 300 |
+
httponly=True,
|
| 301 |
+
samesite="lax",
|
| 302 |
+
secure=request.url.scheme == "https",
|
| 303 |
+
max_age=store.ttl_seconds,
|
| 304 |
+
path="/",
|
| 305 |
+
)
|
| 306 |
+
return response
|
| 307 |
+
|
| 308 |
+
@router.post("/api/admin/logout", response_model=None)
|
| 309 |
+
def admin_logout(request: Request) -> Response:
|
| 310 |
+
token = (request.cookies.get(ADMIN_SESSION_COOKIE) or "").strip()
|
| 311 |
+
store = getattr(request.app.state, "admin_sessions", None)
|
| 312 |
+
if store is not None:
|
| 313 |
+
store.revoke(token)
|
| 314 |
+
response = JSONResponse({"status": "ok"})
|
| 315 |
+
response.delete_cookie(ADMIN_SESSION_COOKIE, path="/")
|
| 316 |
+
return response
|
| 317 |
+
|
| 318 |
+
@router.get("/config", response_model=None)
|
| 319 |
+
def config_page(request: Request) -> FileResponse | RedirectResponse:
|
| 320 |
+
"""配置页入口。"""
|
| 321 |
+
require_config_login_enabled(request)
|
| 322 |
+
if not admin_logged_in(request):
|
| 323 |
+
return RedirectResponse(url="/login", status_code=302)
|
| 324 |
+
path = STATIC_DIR / "config.html"
|
| 325 |
+
if not path.is_file():
|
| 326 |
+
raise HTTPException(status_code=404, detail="Config page is not ready")
|
| 327 |
+
return FileResponse(path)
|
| 328 |
+
|
| 329 |
+
return router
|
core/api/conv_parser.py
ADDED
|
@@ -0,0 +1,186 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
会话 ID 携带方式:任意字符串 → base64 → 零宽字符编码,用特殊零宽标记组包裹。
|
| 3 |
+
从对话内容中通过正则匹配起止标记提取会话 ID,与 session_id 的具体格式无关。
|
| 4 |
+
|
| 5 |
+
编码协议:
|
| 6 |
+
session_id (utf-8)
|
| 7 |
+
→ base64 (A-Za-z0-9+/=,最多 65 个不同符号)
|
| 8 |
+
→ 每个 base64 字符用 3 位 base-5 零宽字符表示(5³=125 ≥ 65)
|
| 9 |
+
→ 有效索引范围 0..64(64 个字符 + padding),故三元组首位最大为 2(3*25=75 > 64)
|
| 10 |
+
→ 因此首位为 ZW[3] 或 ZW[4] 的三元组绝不出现在正文中
|
| 11 |
+
→ HEAD_MARK/TAIL_MARK 正是利用首位 ≥ 3 的三元组构造,保证不会误中正文
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
import base64
|
| 15 |
+
import re
|
| 16 |
+
from typing import Any
|
| 17 |
+
|
| 18 |
+
# 零宽字符集(5 个字符,基数 5,索引 0-4)
|
| 19 |
+
_ZERO_WIDTH = (
|
| 20 |
+
"\u200b", # 零宽空格 → 0
|
| 21 |
+
"\u200c", # 零宽非连接符 → 1
|
| 22 |
+
"\u200d", # 零宽连接符 → 2
|
| 23 |
+
"\ufeff", # 零宽非断空格 → 3
|
| 24 |
+
"\u180e", # 蒙古文元音分隔符 → 4
|
| 25 |
+
)
|
| 26 |
+
_ZW_SET = frozenset(_ZERO_WIDTH)
|
| 27 |
+
_ZW_TO_IDX = {c: i for i, c in enumerate(_ZERO_WIDTH)}
|
| 28 |
+
|
| 29 |
+
# base64 标准字符集(64 个字符),padding 符 "=" 用索引 64 表示
|
| 30 |
+
_B64_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
|
| 31 |
+
_B64_TO_IDX = {c: i for i, c in enumerate(_B64_CHARS)}
|
| 32 |
+
_PAD_IDX = 64 # "=" 的编码索引
|
| 33 |
+
|
| 34 |
+
# 起止标记:首位均为 ZW[3] 或 ZW[4],保证不出现在 payload 三元组中
|
| 35 |
+
_HEAD_MARK = _ZERO_WIDTH[4] * 3 + _ZERO_WIDTH[3] * 3 # 6 个零宽字符
|
| 36 |
+
_TAIL_MARK = _ZERO_WIDTH[3] * 3 + _ZERO_WIDTH[4] * 3 # 6 个零宽字符
|
| 37 |
+
|
| 38 |
+
_ZW_CLASS = r"[\u200b\u200c\u200d\ufeff\u180e]"
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def _encode_b64idx(idx: int) -> str:
|
| 42 |
+
"""将 base64 字符索引 (0-64) 编码为 3 个零宽字符(3 位 base-5)。"""
|
| 43 |
+
a = idx // 25
|
| 44 |
+
r = idx % 25
|
| 45 |
+
b = r // 5
|
| 46 |
+
c = r % 5
|
| 47 |
+
return _ZERO_WIDTH[a] + _ZERO_WIDTH[b] + _ZERO_WIDTH[c]
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def _decode_b64idx(zw3: str) -> int | None:
|
| 51 |
+
"""将 3 个零宽字符解码为 base64 字符索引(0-64),非法返回 None。"""
|
| 52 |
+
if len(zw3) != 3:
|
| 53 |
+
return None
|
| 54 |
+
a = _ZW_TO_IDX.get(zw3[0])
|
| 55 |
+
b = _ZW_TO_IDX.get(zw3[1])
|
| 56 |
+
c = _ZW_TO_IDX.get(zw3[2])
|
| 57 |
+
if a is None or b is None or c is None:
|
| 58 |
+
return None
|
| 59 |
+
val = a * 25 + b * 5 + c
|
| 60 |
+
if val > 64:
|
| 61 |
+
return None
|
| 62 |
+
return val
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def encode_session_id(session_id: str) -> str:
|
| 66 |
+
"""
|
| 67 |
+
将任意字符串会话 ID 编码为不可见的零宽序列:
|
| 68 |
+
HEAD_MARK + zero_width_encoded(base64(utf-8(session_id))) + TAIL_MARK
|
| 69 |
+
"""
|
| 70 |
+
b64 = base64.b64encode(session_id.encode()).decode()
|
| 71 |
+
out: list[str] = []
|
| 72 |
+
for ch in b64:
|
| 73 |
+
if ch == "=":
|
| 74 |
+
out.append(_encode_b64idx(_PAD_IDX))
|
| 75 |
+
else:
|
| 76 |
+
idx = _B64_TO_IDX.get(ch)
|
| 77 |
+
if idx is None:
|
| 78 |
+
return ""
|
| 79 |
+
out.append(_encode_b64idx(idx))
|
| 80 |
+
return _HEAD_MARK + "".join(out) + _TAIL_MARK
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def decode_session_id(text: str) -> str | None:
|
| 84 |
+
"""
|
| 85 |
+
从文本中提取第一个被标记包裹的会话 ID(解码零宽 → base64 → utf-8)。
|
| 86 |
+
若未找到有效标记或解码失败则返回 None。
|
| 87 |
+
"""
|
| 88 |
+
m = re.search(
|
| 89 |
+
re.escape(_HEAD_MARK) + r"(" + _ZW_CLASS + r"+?)" + re.escape(_TAIL_MARK),
|
| 90 |
+
text,
|
| 91 |
+
)
|
| 92 |
+
if not m:
|
| 93 |
+
return None
|
| 94 |
+
body = m.group(1)
|
| 95 |
+
if len(body) % 3 != 0:
|
| 96 |
+
return None
|
| 97 |
+
b64_chars: list[str] = []
|
| 98 |
+
for i in range(0, len(body), 3):
|
| 99 |
+
idx = _decode_b64idx(body[i : i + 3])
|
| 100 |
+
if idx is None:
|
| 101 |
+
return None
|
| 102 |
+
b64_chars.append("=" if idx == _PAD_IDX else _B64_CHARS[idx])
|
| 103 |
+
try:
|
| 104 |
+
return base64.b64decode("".join(b64_chars)).decode()
|
| 105 |
+
except Exception:
|
| 106 |
+
return None
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def decode_latest_session_id(text: str) -> str | None:
|
| 110 |
+
"""
|
| 111 |
+
从文本中提取最后一个被标记包裹的会话 ID。
|
| 112 |
+
用于客户端保留完整历史时,优先命中最近一次返回的 session_id。
|
| 113 |
+
"""
|
| 114 |
+
matches = list(
|
| 115 |
+
re.finditer(
|
| 116 |
+
re.escape(_HEAD_MARK) + r"(" + _ZW_CLASS + r"+?)" + re.escape(_TAIL_MARK),
|
| 117 |
+
text,
|
| 118 |
+
)
|
| 119 |
+
)
|
| 120 |
+
if not matches:
|
| 121 |
+
return None
|
| 122 |
+
body = matches[-1].group(1)
|
| 123 |
+
if len(body) % 3 != 0:
|
| 124 |
+
return None
|
| 125 |
+
b64_chars: list[str] = []
|
| 126 |
+
for i in range(0, len(body), 3):
|
| 127 |
+
idx = _decode_b64idx(body[i : i + 3])
|
| 128 |
+
if idx is None:
|
| 129 |
+
return None
|
| 130 |
+
b64_chars.append("=" if idx == _PAD_IDX else _B64_CHARS[idx])
|
| 131 |
+
try:
|
| 132 |
+
return base64.b64decode("".join(b64_chars)).decode()
|
| 133 |
+
except Exception:
|
| 134 |
+
return None
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def extract_session_id_marker(text: str) -> str:
|
| 138 |
+
"""
|
| 139 |
+
从文本中提取完整的零宽会话 ID 标记段(HEAD_MARK + body + TAIL_MARK),
|
| 140 |
+
用于在 tool_calls 的 text_content 中携带会话 ID 至下一轮对话。
|
| 141 |
+
若未找到则返回空字符串。
|
| 142 |
+
"""
|
| 143 |
+
m = re.search(
|
| 144 |
+
re.escape(_HEAD_MARK) + _ZW_CLASS + r"+?" + re.escape(_TAIL_MARK),
|
| 145 |
+
text,
|
| 146 |
+
)
|
| 147 |
+
return m.group(0) if m else ""
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def session_id_suffix(session_id: str) -> str:
|
| 151 |
+
"""返回响应末尾需附加的不可见标记(含 HEAD/TAIL 包裹的零宽编码会话 ID)。"""
|
| 152 |
+
return encode_session_id(session_id)
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
def strip_session_id_suffix(text: str) -> str:
|
| 156 |
+
"""去掉文本中所有零宽会话 ID 标记段(HEAD_MARK...TAIL_MARK),返回干净正文。"""
|
| 157 |
+
return re.sub(
|
| 158 |
+
re.escape(_HEAD_MARK) + _ZW_CLASS + r"+?" + re.escape(_TAIL_MARK),
|
| 159 |
+
"",
|
| 160 |
+
text,
|
| 161 |
+
)
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def _normalize_content(content: str | list[Any]) -> str:
|
| 165 |
+
if isinstance(content, str):
|
| 166 |
+
return content
|
| 167 |
+
parts: list[str] = []
|
| 168 |
+
for p in content:
|
| 169 |
+
if isinstance(p, dict) and p.get("type") == "text" and "text" in p:
|
| 170 |
+
parts.append(str(p["text"]))
|
| 171 |
+
elif isinstance(p, str):
|
| 172 |
+
parts.append(p)
|
| 173 |
+
return " ".join(parts)
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
def parse_conv_uuid_from_messages(messages: list[dict[str, Any]]) -> str | None:
|
| 177 |
+
"""从 messages 中解析最新会话 ID(从最后一条带标记的消息开始逆序查找)。"""
|
| 178 |
+
for m in reversed(messages):
|
| 179 |
+
content = m.get("content")
|
| 180 |
+
if content is None:
|
| 181 |
+
continue
|
| 182 |
+
text = _normalize_content(content)
|
| 183 |
+
decoded = decode_latest_session_id(text)
|
| 184 |
+
if decoded is not None:
|
| 185 |
+
return decoded
|
| 186 |
+
return None
|
core/api/fingerprint.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""会话指纹:基于 system prompt + 首条 user 消息计算 SHA-256 指纹。
|
| 2 |
+
|
| 3 |
+
同一逻辑对话(相同 system + 相同首条 user)的指纹恒定,
|
| 4 |
+
不同对话指纹不同,杜绝上下文污染。
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import hashlib
|
| 8 |
+
|
| 9 |
+
from core.api.schemas import OpenAIMessage
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def _norm_content(content: str | list | None) -> str:
|
| 13 |
+
if content is None:
|
| 14 |
+
return ""
|
| 15 |
+
if isinstance(content, str):
|
| 16 |
+
return content.strip()
|
| 17 |
+
# list[OpenAIContentPart]
|
| 18 |
+
parts: list[str] = []
|
| 19 |
+
for p in content:
|
| 20 |
+
if hasattr(p, "type") and p.type == "text" and p.text:
|
| 21 |
+
parts.append(p.text.strip())
|
| 22 |
+
return " ".join(parts)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def compute_conversation_fingerprint(messages: list[OpenAIMessage]) -> str:
|
| 26 |
+
"""sha256(system_prompt + first_user_message)[:16]
|
| 27 |
+
|
| 28 |
+
Returns empty string if no user message found.
|
| 29 |
+
"""
|
| 30 |
+
system_text = ""
|
| 31 |
+
first_user_text = ""
|
| 32 |
+
for m in messages:
|
| 33 |
+
if m.role == "system" and not system_text:
|
| 34 |
+
system_text = _norm_content(m.content)
|
| 35 |
+
elif m.role == "user" and not first_user_text:
|
| 36 |
+
first_user_text = _norm_content(m.content)
|
| 37 |
+
break
|
| 38 |
+
if not first_user_text:
|
| 39 |
+
return ""
|
| 40 |
+
raw = f"{system_text}\n{first_user_text}"
|
| 41 |
+
return hashlib.sha256(raw.encode()).hexdigest()[:16]
|
core/api/function_call.py
ADDED
|
@@ -0,0 +1,351 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Function Call 层:解析模型输出的 <tool_call> 格式,转换为 OpenAI tool_calls;
|
| 3 |
+
将 tools 和 tool 结果拼入 prompt。对外统一使用 OpenAI 格式。
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import json
|
| 7 |
+
import re
|
| 8 |
+
import uuid
|
| 9 |
+
from collections.abc import Callable
|
| 10 |
+
from typing import Any
|
| 11 |
+
|
| 12 |
+
TOOL_CALL_PREFIX = "<tool_call>"
|
| 13 |
+
TOOL_CALL_PREFIX_LEN = len(TOOL_CALL_PREFIX)
|
| 14 |
+
TOOL_CALL_PATTERN = re.compile(
|
| 15 |
+
r"<tool_call>\s*(.*?)\s*</tool_call>",
|
| 16 |
+
re.DOTALL,
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def parse_tool_calls(text: str) -> list[dict[str, Any]]:
|
| 21 |
+
"""
|
| 22 |
+
从文本中解析所有 <tool_call>...</tool_call> 块。
|
| 23 |
+
返回 [{"name": str, "arguments": dict | str}, ...]
|
| 24 |
+
"""
|
| 25 |
+
if not text or not text.strip():
|
| 26 |
+
return []
|
| 27 |
+
matches = TOOL_CALL_PATTERN.findall(text)
|
| 28 |
+
result: list[dict[str, Any]] = []
|
| 29 |
+
for m in matches:
|
| 30 |
+
try:
|
| 31 |
+
obj = json.loads(m.strip())
|
| 32 |
+
if isinstance(obj, dict) and "name" in obj:
|
| 33 |
+
args = obj.get("arguments", {})
|
| 34 |
+
if isinstance(args, str):
|
| 35 |
+
try:
|
| 36 |
+
args = json.loads(args)
|
| 37 |
+
except json.JSONDecodeError:
|
| 38 |
+
args = {}
|
| 39 |
+
result.append({"name": obj["name"], "arguments": args})
|
| 40 |
+
except json.JSONDecodeError:
|
| 41 |
+
pass
|
| 42 |
+
return result
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def detect_tool_call_mode(buffer: str, *, strip_session_id: bool = True) -> bool | None:
|
| 46 |
+
"""
|
| 47 |
+
根据 buffer 内容判断是否为 tool_call 模式。
|
| 48 |
+
None=尚未确定,True=tool_call,False=普通文本。
|
| 49 |
+
strip_session_id: 若 True,先去掉开头的零宽 session_id 前缀再判断。
|
| 50 |
+
"""
|
| 51 |
+
content = buffer
|
| 52 |
+
if strip_session_id:
|
| 53 |
+
from core.api.conv_parser import strip_session_id_suffix
|
| 54 |
+
|
| 55 |
+
content = strip_session_id_suffix(buffer)
|
| 56 |
+
stripped = content.lstrip()
|
| 57 |
+
if stripped.startswith(TOOL_CALL_PREFIX):
|
| 58 |
+
return True
|
| 59 |
+
if len(stripped) > TOOL_CALL_PREFIX_LEN:
|
| 60 |
+
return False
|
| 61 |
+
return None
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def format_tools_for_prompt(tools: list[dict[str, Any]]) -> str:
|
| 65 |
+
"""
|
| 66 |
+
将 OpenAI 格式的 tools 转为可读文本,用于 prompt。
|
| 67 |
+
兼容 OpenAI 格式 {type, function: {name, description, parameters}}
|
| 68 |
+
和 Cursor 格式 {name, description, input_schema}。
|
| 69 |
+
"""
|
| 70 |
+
if not tools:
|
| 71 |
+
return ""
|
| 72 |
+
lines: list[str] = []
|
| 73 |
+
for t in tools:
|
| 74 |
+
if not isinstance(t, dict):
|
| 75 |
+
continue
|
| 76 |
+
fn = t.get("function") if t.get("type") == "function" else t
|
| 77 |
+
if not isinstance(fn, dict):
|
| 78 |
+
fn = t
|
| 79 |
+
name = fn.get("name")
|
| 80 |
+
if not name:
|
| 81 |
+
continue
|
| 82 |
+
desc = fn.get("description") or fn.get("summary") or ""
|
| 83 |
+
params = fn.get("parameters") or fn.get("input_schema") or {}
|
| 84 |
+
if isinstance(params, str):
|
| 85 |
+
try:
|
| 86 |
+
params = json.loads(params)
|
| 87 |
+
except json.JSONDecodeError:
|
| 88 |
+
params = {}
|
| 89 |
+
props = params.get("properties") or {}
|
| 90 |
+
required = params.get("required") or []
|
| 91 |
+
args_desc = ", ".join(
|
| 92 |
+
f"{k}: {v.get('type', 'any')}" + (" (必填)" if k in required else "")
|
| 93 |
+
for k, v in props.items()
|
| 94 |
+
)
|
| 95 |
+
lines.append(
|
| 96 |
+
f"- {name}({args_desc}): {desc[:200]}" + ("..." if len(desc) > 200 else "")
|
| 97 |
+
)
|
| 98 |
+
return "\n".join(lines) if lines else ""
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def build_tool_calls_response(
    tool_calls_list: list[dict[str, Any]],
    chat_id: str,
    model: str,
    created: int,
    *,
    text_content: str = "",
) -> dict[str, Any]:
    """Build an OpenAI-style chat.completion payload carrying tool_calls.

    message.content is the given text (null when empty); tool_calls is the
    standard OpenAI array with freshly generated call ids.
    """

    def _args_json(raw: Any) -> str:
        # Serialize tool arguments; fall back to "{}" on unparsable input.
        if isinstance(raw, dict):
            return json.dumps(raw, ensure_ascii=False)
        try:
            parsed = json.loads(str(raw)) if raw else {}
        except json.JSONDecodeError:
            return "{}"
        return json.dumps(parsed, ensure_ascii=False)

    calls = [
        {
            "id": f"call_{uuid.uuid4().hex[:24]}",
            "type": "function",
            "function": {
                "name": entry.get("name", ""),
                "arguments": _args_json(entry.get("arguments", {})),
            },
        }
        for entry in tool_calls_list
    ]
    assistant_msg: dict[str, Any] = {
        "role": "assistant",
        "content": text_content or None,
        "tool_calls": calls,
    }
    return {
        "id": chat_id,
        "object": "chat.completion",
        "created": created,
        "model": model,
        "choices": [
            {"index": 0, "message": assistant_msg, "finish_reason": "tool_calls"}
        ],
    }
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
def _openai_sse_chunk(
|
| 153 |
+
chat_id: str,
|
| 154 |
+
model: str,
|
| 155 |
+
created: int,
|
| 156 |
+
delta: dict,
|
| 157 |
+
finish_reason: str | None = None,
|
| 158 |
+
) -> str:
|
| 159 |
+
"""构建 OpenAI 流式 SSE:data: <json>\\n\\n"""
|
| 160 |
+
choice: dict[str, Any] = {"index": 0, "delta": delta}
|
| 161 |
+
if finish_reason is not None:
|
| 162 |
+
choice["finish_reason"] = finish_reason
|
| 163 |
+
data = {
|
| 164 |
+
"id": chat_id,
|
| 165 |
+
"object": "chat.completion.chunk",
|
| 166 |
+
"created": created,
|
| 167 |
+
"model": model,
|
| 168 |
+
"choices": [choice],
|
| 169 |
+
}
|
| 170 |
+
return f"data: {json.dumps(data, ensure_ascii=False)}\n\n"
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
def build_openai_text_sse_events(
    chat_id: str,
    model: str,
    created: int,
) -> tuple[str, Callable[[str], str], Callable[[], str]]:
    """Factory for OpenAI streaming text events.

    Returns (msg_start_sse, make_delta_sse, make_stop_sse); msg_start is the
    leading chunk that carries the assistant role.
    """
    start_sse = _openai_sse_chunk(
        chat_id,
        model,
        created,
        delta={"role": "assistant", "content": ""},
        finish_reason=None,
    )

    def emit_delta(text: str) -> str:
        # One content chunk per text fragment.
        return _openai_sse_chunk(
            chat_id, model, created, delta={"content": text}, finish_reason=None
        )

    def emit_stop() -> str:
        # Final chunk with finish_reason plus the terminating [DONE] frame.
        stop_chunk = _openai_sse_chunk(
            chat_id, model, created, delta={}, finish_reason="stop"
        )
        return stop_chunk + "data: [DONE]\n\n"

    return start_sse, emit_delta, emit_stop
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
def build_tool_calls_with_ids(
    tool_calls_list: list[dict[str, Any]],
) -> list[dict[str, Any]]:
    """Attach fresh OpenAI-style ids to name+arguments tool-call entries.

    Streaming delivery and debug persistence share the same id batch so the
    next request's tool_call_id values line up.
    """
    result: list[dict[str, Any]] = []
    for idx, entry in enumerate(tool_calls_list):
        raw_args = entry.get("arguments", {})
        if isinstance(raw_args, dict):
            serialized = json.dumps(raw_args, ensure_ascii=False)
        else:
            # Arguments may arrive pre-serialized; normalize or fall back.
            try:
                serialized = json.dumps(
                    json.loads(str(raw_args)) if raw_args else {},
                    ensure_ascii=False,
                )
            except json.JSONDecodeError:
                serialized = "{}"
        result.append(
            {
                "index": idx,
                "id": f"call_{uuid.uuid4().hex[:24]}",
                "type": "function",
                "function": {
                    "name": entry.get("name", ""),
                    "arguments": serialized,
                },
            }
        )
    return result
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
def build_openai_tool_use_sse_events(
    tool_calls_list: list[dict[str, Any]],
    chat_id: str,
    model: str,
    created: int,
    *,
    text_content: str = "",
    tool_calls_with_ids: list[dict[str, Any]] | None = None,
) -> tuple[list[str], list[dict[str, Any]]]:
    """Build the OpenAI streaming SSE events for a tool_calls turn.

    With text_content (e.g. thinking): emit a content chunk first, then the
    tool_calls chunk, so clients show the reasoning before the call.
    Without it: a single chunk carries role + tool_calls.
    The tool_calls path ends with finish_reason only — no "data: [DONE]"
    frame follows the think content.
    """
    calls = (
        tool_calls_with_ids
        if tool_calls_with_ids is not None
        else build_tool_calls_with_ids(tool_calls_list)
    )
    events: list[str] = []
    if text_content:
        # Content (thinking) first, then tool_calls, ordered inside one message.
        events.append(
            _openai_sse_chunk(
                chat_id,
                model,
                created,
                {"role": "assistant", "content": text_content},
                None,
            )
        )
        events.append(
            _openai_sse_chunk(chat_id, model, created, {"tool_calls": calls}, None)
        )
    else:
        events.append(
            _openai_sse_chunk(
                chat_id,
                model,
                created,
                {"role": "assistant", "content": "", "tool_calls": calls},
                None,
            )
        )
    events.append(_openai_sse_chunk(chat_id, model, created, {}, "tool_calls"))
    return (events, calls)
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
def stream_openai_tool_use_sse_events(
    tool_calls_list: list[dict[str, Any]],
    chat_id: str,
    model: str,
    created: int,
    *,
    tool_calls_with_ids: list[dict[str, Any]] | None = None,
) -> list[str]:
    """Stream tool_calls incrementally as OpenAI SSE chunks.

    First chunk announces every tool's id/name with empty arguments, then
    one chunk per tool carries its full arguments, and the last chunk sets
    finish_reason — letting clients render progressively. Content (such as
    <think>) is assumed to have been streamed by the caller already.
    """
    calls = (
        tool_calls_with_ids
        if tool_calls_with_ids is not None
        else build_tool_calls_with_ids(tool_calls_list)
    )
    events: list[str] = []
    # Head chunk: id + type + name only, arguments empty, so the client can
    # immediately show "calling xxx".
    heads: list[dict[str, Any]] = [
        {
            "index": call["index"],
            "id": call["id"],
            "type": "function",
            "function": {"name": call["function"]["name"], "arguments": ""},
        }
        for call in calls
    ]
    events.append(
        _openai_sse_chunk(chat_id, model, created, {"tool_calls": heads}, None)
    )
    # Follow-up chunks: index + function.arguments only — one per tool here,
    # though arguments could also be split into smaller slices.
    for call in calls:
        arguments = call.get("function", {}).get("arguments", "") or ""
        if not arguments:
            continue
        events.append(
            _openai_sse_chunk(
                chat_id,
                model,
                created,
                {
                    "tool_calls": [
                        {"index": call["index"], "function": {"arguments": arguments}}
                    ]
                },
                None,
            )
        )
    events.append(_openai_sse_chunk(chat_id, model, created, {}, "tool_calls"))
    return events
|
core/api/mock_claude.py
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Mock Claude API:与 claude.py 调用格式兼容,不消耗 token。
|
| 3 |
+
设置 CLAUDE_START_URL 和 CLAUDE_API_BASE 指向 http://ip:port/mock 即可调试。
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import asyncio
|
| 7 |
+
import json
|
| 8 |
+
import uuid as uuid_mod
|
| 9 |
+
from collections.abc import AsyncIterator
|
| 10 |
+
|
| 11 |
+
from fastapi import APIRouter
|
| 12 |
+
from fastapi.responses import HTMLResponse, StreamingResponse
|
| 13 |
+
|
| 14 |
+
router = APIRouter(prefix="/mock", tags=["mock"])
|
| 15 |
+
|
| 16 |
+
MOCK_ORG_UUID = "00000000-0000-0000-0000-000000000001"
|
| 17 |
+
|
| 18 |
+
# 自定义回复:请求来时在终端用多行输入要回复的内容
|
| 19 |
+
INPUT_PROMPT = "Mock 回复内容(支持多行,空行结束):"
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@router.get("", response_class=HTMLResponse)
|
| 23 |
+
@router.get("/", response_class=HTMLResponse)
|
| 24 |
+
def mock_start_page() -> str:
|
| 25 |
+
"""CLAUDE_START_URL 指向 /mock 时,浏览器加载此页。"""
|
| 26 |
+
return """
|
| 27 |
+
<!DOCTYPE html>
|
| 28 |
+
<html><head><title>Mock Claude</title></head>
|
| 29 |
+
<body><p>Mock Claude - 调试用</p></body>
|
| 30 |
+
</html>
|
| 31 |
+
"""
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
@router.get("/account")
|
| 35 |
+
def mock_account() -> dict:
|
| 36 |
+
"""_get_org_uuid 调用的 GET /account,返回 memberships 含 org uuid。"""
|
| 37 |
+
return {
|
| 38 |
+
"memberships": [
|
| 39 |
+
{"organization": {"uuid": MOCK_ORG_UUID}},
|
| 40 |
+
],
|
| 41 |
+
}
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@router.post("/organizations/{org_uuid}/chat_conversations")
def mock_create_conversation(org_uuid: str) -> dict:
    """Conversation-creation endpoint hit by _post_create_conversation."""
    conversation_uuid = uuid_mod.uuid4()
    return {"uuid": str(conversation_uuid)}
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def _read_reply_from_stdin() -> str:
    """Collect a multi-line reply from the terminal via repeated input().

    Reading stops at the first empty line or EOF. Blocking — run it in a
    worker thread, never on the event loop.
    """
    print(INPUT_PROMPT, flush=True)
    print("直接粘贴多行文本,最后再按一次回车输入空行结束。", flush=True)
    collected: list[str] = []
    while True:
        try:
            entry = input()
        except EOFError:
            break
        if entry == "":
            # A blank line terminates the paste.
            break
        collected.append(entry)
    return "\n".join(collected).rstrip()
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
@router.post("/organizations/{org_uuid}/chat_conversations/{conv_uuid}/completion")
async def mock_completion(
    org_uuid: str,
    conv_uuid: str,  # noqa: ARG001
) -> StreamingResponse:
    """Completion endpoint consumed by stream_completion; returns an SSE
    stream. When a request arrives, the reply text is read from the
    terminal via input()."""

    # Run the blocking input() in a thread so the event loop stays free.
    reply_text = await asyncio.to_thread(_read_reply_from_stdin)

    async def sse_stream() -> AsyncIterator[str]:
        msg_uuid = str(uuid_mod.uuid4())
        # message_start
        yield f"data: {json.dumps({'type': 'message_start', 'message': {'id': msg_uuid, 'uuid': msg_uuid, 'model': 'claude-sonnet-4-5-20250929', 'type': 'message', 'role': 'assistant'}})}\n\n"
        # content_block_start
        yield f"data: {json.dumps({'type': 'content_block_start', 'index': 0, 'content_block': {'type': 'text', 'text': ''}})}\n\n"
        # content_block_delta — stream the reply in small chunks
        chunk_size = 2
        for i in range(0, len(reply_text), chunk_size):
            chunk = reply_text[i : i + chunk_size]
            yield f"data: {json.dumps({'type': 'content_block_delta', 'index': 0, 'delta': {'type': 'text_delta', 'text': chunk}})}\n\n"
            await asyncio.sleep(0.05)
        # content_block_stop
        yield f"data: {json.dumps({'type': 'content_block_stop', 'index': 0})}\n\n"
        # message_stop
        yield f"data: {json.dumps({'type': 'message_stop'})}\n\n"

    return StreamingResponse(
        sse_stream(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "X-Accel-Buffering": "no",
        },
    )
|
core/api/react.py
ADDED
|
@@ -0,0 +1,244 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
ReAct 模块:解析 LLM 纯文本输出(Thought/Action/Action Input),转换为 function_call 格式。
|
| 3 |
+
适用于不支持 function calling 的 LLM。提示词借鉴 Dify ReAct 结构与表述,保持行式格式。
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import json
|
| 7 |
+
import re
|
| 8 |
+
from typing import Any
|
| 9 |
+
|
| 10 |
+
# 复用 function_call 的工具描述格式化
|
| 11 |
+
from core.api.function_call import format_tools_for_prompt
|
| 12 |
+
|
| 13 |
+
# Fixed ReAct prompt (structure and wording adapted from Dify's ReAct
# prompt; kept line-oriented so parse_react_output can handle it).
REACT_PROMPT_FIXED = r"""Respond to the human as helpfully and accurately as possible.

You have access to the following tools (listed below under "## Available tools").

Use the following format:

Question: the input question you must answer
Thought: consider what you know and what to do next
Action: the tool name (exactly one of the tools listed below)
Action Input: a single-line JSON object as the tool input
Observation: the result of the action (injected by the system — do NOT output this yourself)
... (repeat Thought / Action / Action Input as needed; after each, the system adds Observation)
Thought: I know the final answer
Final Answer: your final response to the human

Provide only ONE action per response. Valid "Action" values: a tool name from the list, or (when done) output "Final Answer" / "最终答案" instead of Action + Action Input.

Rules:
- After "Action Input: {...}" you must STOP and wait for Observation. Do not add any text, code, or explanation after the JSON line.
- Action Input must be a single-line valid JSON. All double quotes `"` in JSON values must be escaped as `\"`. Do not output "Observation" yourself.
- Format is: Thought → Action → Action Input (or Final Answer when done). Then the system replies with Observation.

Begin. Always respond with a valid Thought then Action then Action Input (or Final Answer). Use tools when necessary; respond with Final Answer when appropriate.
"""
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def format_react_prompt(
    tools: list[dict[str, Any]],
    tools_text: str | None = None,
) -> str:
    """Compose the fixed ReAct system prefix plus the available-tools list."""
    rendered = format_tools_for_prompt(tools) if tools_text is None else tools_text
    return REACT_PROMPT_FIXED + "\n\n---\n\n## Available tools\n\n" + rendered + "\n"
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def parse_react_output(text: str) -> dict[str, Any] | None:
|
| 51 |
+
"""
|
| 52 |
+
解析行式 ReAct 输出 (Thought / Action / Action Input)。
|
| 53 |
+
返回 {"type": "final_answer", "content": str} 或
|
| 54 |
+
{"type": "tool_call", "tool": str, "params": dict} 或 None(解析失败)。
|
| 55 |
+
注意:优先解析 Action,若同时存在 Action 与 Final Answer,则返回 tool_call,
|
| 56 |
+
以便正确下发 tool_calls 给客户端执行。
|
| 57 |
+
"""
|
| 58 |
+
if not text or not text.strip():
|
| 59 |
+
return None
|
| 60 |
+
|
| 61 |
+
# 1. 优先提取 Action + Action Input(若存在则返回 tool_call,避免被 Final Answer 抢先)
|
| 62 |
+
action_match = re.search(r"^\s*Action[::]\s*(\w+)", text, re.MULTILINE)
|
| 63 |
+
if action_match:
|
| 64 |
+
tool_name = action_match.group(1).strip()
|
| 65 |
+
|
| 66 |
+
# 2. 提取 Action Input(单行 JSON 或简单多行)
|
| 67 |
+
input_match = re.search(r"Action Input[::]\s*(\{[^\n]+\})", text)
|
| 68 |
+
json_str: str | None = None
|
| 69 |
+
if input_match:
|
| 70 |
+
json_str = input_match.group(1).strip()
|
| 71 |
+
else:
|
| 72 |
+
# 多行 JSON:从 Action Input 到下一关键字
|
| 73 |
+
start_m = re.search(r"Action Input[::]\s*", text)
|
| 74 |
+
if start_m:
|
| 75 |
+
rest = text[start_m.end() :]
|
| 76 |
+
end_m = re.search(
|
| 77 |
+
r"\n\s*(?:Thought|Action|Observation|Final)", rest, re.I
|
| 78 |
+
)
|
| 79 |
+
raw = rest[: end_m.start()].strip() if end_m else rest.strip()
|
| 80 |
+
if raw.startswith("{") and "}" in raw:
|
| 81 |
+
depth = 0
|
| 82 |
+
for i, c in enumerate(raw):
|
| 83 |
+
if c == "{":
|
| 84 |
+
depth += 1
|
| 85 |
+
elif c == "}":
|
| 86 |
+
depth -= 1
|
| 87 |
+
if depth == 0:
|
| 88 |
+
json_str = raw[: i + 1]
|
| 89 |
+
break
|
| 90 |
+
|
| 91 |
+
if not json_str:
|
| 92 |
+
return {
|
| 93 |
+
"type": "tool_call",
|
| 94 |
+
"tool": tool_name,
|
| 95 |
+
"params": {},
|
| 96 |
+
"parse_error": "no_action_input",
|
| 97 |
+
}
|
| 98 |
+
|
| 99 |
+
try:
|
| 100 |
+
params = json.loads(json_str)
|
| 101 |
+
except json.JSONDecodeError as e:
|
| 102 |
+
return {
|
| 103 |
+
"type": "tool_call",
|
| 104 |
+
"tool": tool_name,
|
| 105 |
+
"params": {},
|
| 106 |
+
"parse_error": str(e),
|
| 107 |
+
}
|
| 108 |
+
|
| 109 |
+
return {"type": "tool_call", "tool": tool_name, "params": params}
|
| 110 |
+
|
| 111 |
+
# 3. 无 Action 时,检查 Final Answer
|
| 112 |
+
m = re.search(
|
| 113 |
+
r"(?:Final Answer|最终答案)[::]\s*(.*)",
|
| 114 |
+
text,
|
| 115 |
+
re.DOTALL | re.I,
|
| 116 |
+
)
|
| 117 |
+
if m:
|
| 118 |
+
content = m.group(1).strip()
|
| 119 |
+
return {"type": "final_answer", "content": content}
|
| 120 |
+
|
| 121 |
+
return None
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
def react_output_to_tool_calls(parsed: dict[str, Any]) -> list[dict[str, Any]]:
    """Convert a parse_react_output tool_call result into the
    tool_calls_list shape consumed by build_tool_calls_response /
    build_tool_calls_chunk.
    """
    if parsed.get("type") != "tool_call":
        return []
    entry = {"name": parsed.get("tool", ""), "arguments": parsed.get("params", {})}
    return [entry]
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def format_react_final_answer_content(text: str) -> str:
    """Wrap the Thought part of a "Thought + Final Answer" ReAct reply in
    <think> tags so clients can render it as reasoning; any other text is
    returned unchanged.
    """
    if not text or not text.strip():
        return text
    # Match "Thought: ..." up to "Final Answer:" / "最终答案:", and the answer itself.
    flags = re.DOTALL | re.I
    thought_m = re.search(
        r"Thought[::]\s*(.+?)(?=\s*(?:Final Answer|最终答案)[::]|\Z)",
        text,
        flags,
    )
    answer_m = re.search(
        r"(?:Final Answer|最终答案)[::]\s*(.*)",
        text,
        flags,
    )
    if not (thought_m and answer_m):
        return text
    thought = (thought_m.group(1) or "").strip()
    answer = (answer_m.group(1) or "").strip()
    return f"<think>{thought}</think>\n\n{answer}"
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def extract_thought_so_far(buffer: str) -> tuple[str | None, bool]:
    """
    Incrementally parse the current Thought content from a streaming buffer
    (from "Thought:" up to "Action:" / "Final Answer:" / end of buffer).
    Returns (thought_content, thought_ended):
    - thought_content: the Thought body confirmed so far (without the
      "Thought:" prefix); None when no "Thought:" has appeared yet.
    - thought_ended: True once "Action:" or "Final Answer:" has appeared,
      i.e. the Thought section is finished.
    """
    content = buffer.lstrip()
    if not content:
        return (None, False)
    # A "Thought:" marker must already be present.
    thought_start = re.search(r"Thought[::]\s*", content, re.I)
    if not thought_start:
        return (None, False)
    start = thought_start.end()
    rest = content[start:]
    # First look for a definite terminator: "Action:" or "Final Answer:"
    # (truncate as soon as one appears — no tool name required after it yet).
    action_m = re.search(r"Action[::]\s*", rest, re.I)
    final_m = re.search(r"(?:Final Answer|最终答案)[::]\s*", rest, re.I)
    end_pos: int | None = None
    if action_m and (final_m is None or action_m.start() <= final_m.start()):
        end_pos = action_m.start()
    if final_m and (end_pos is None or final_m.start() < end_pos):
        end_pos = final_m.start()
    if end_pos is not None:
        thought_content = rest[:end_pos].rstrip()
        return (thought_content, True)
    # No complete keyword yet: drop any trailing fragment that might be the
    # start of a keyword, so fragments like "\nAc", "tion:", "r:" are not
    # streamed out as thought text.
    thought_content = rest.rstrip()
    for kw in ("Action:", "Final Answer:", "最终答案:"):
        # Check the longest keyword prefix first; strip at most one per keyword.
        for i in range(len(kw), 0, -1):
            if thought_content.lower().endswith(kw[:i].lower()):
                thought_content = thought_content[:-i].rstrip()
                break
    # Also strip tail fragments of "Final Answer:" (streaming may have
    # delivered only "Answer:", "r:", etc.) so the final answer text is not
    # counted as part of the think content.
    for suffix in (
        " Final Answer:",
        " Final Answer",
        " Answer:",
        " Answer",
        "Answer:",
        "Answer",
        "nswer:",
        "nswer",
        "swer:",
        "swer",
        "wer:",
        "wer",
        "er:",
        "er",
        "r:",
        "r",
    ):
        if thought_content.endswith(suffix):
            thought_content = thought_content[: -len(suffix)].rstrip()
            break
    return (thought_content, False)
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
def detect_react_mode(buffer: str) -> bool | None:
|
| 224 |
+
"""
|
| 225 |
+
判断 buffer 是否为 ReAct 工具调用模式(规范格式:Thought:/Action:/Action Input:)。
|
| 226 |
+
仅当出现该格式时才识别为 ReAct;未按规范返回一律视为纯文本。
|
| 227 |
+
None=尚未确定,True=ReAct 工具调用,False=普通文本或 Final Answer。
|
| 228 |
+
"""
|
| 229 |
+
stripped = buffer.lstrip()
|
| 230 |
+
if re.search(r"^\s*Action[::]\s*\w+", stripped, re.MULTILINE):
|
| 231 |
+
return True
|
| 232 |
+
if re.search(r"(?:Final Answer|最终答案)[::]", stripped, re.I):
|
| 233 |
+
return False
|
| 234 |
+
# 流式可能只传 Thought/Action 的前半段(如 "Th"、"Tho"),视为尚未确定,继续缓冲
|
| 235 |
+
lower = stripped.lower()
|
| 236 |
+
if lower and ("thought:".startswith(lower) or "action:".startswith(lower)):
|
| 237 |
+
return None
|
| 238 |
+
# 若 buffer 中已出现 Thought:,可能为前导语 + Thought 格式(第二轮常见),保持 None 等待 Action
|
| 239 |
+
if re.search(r"Thought[::]\s*", stripped, re.I):
|
| 240 |
+
return None
|
| 241 |
+
# 未按规范:首行不是 Thought:/Action: 开头则视为纯文本
|
| 242 |
+
if stripped and not re.match(r"^\s*(?:Thought|Action)[::]", stripped, re.I):
|
| 243 |
+
return False
|
| 244 |
+
return None
|
core/api/react_stream_parser.py
ADDED
|
@@ -0,0 +1,435 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
ReAct 流式解析器:字符级 MarkerDetector + StateMachine
|
| 3 |
+
|
| 4 |
+
将 LLM 的 ReAct 格式文本实时转换为 OpenAI SSE 流式事件:
|
| 5 |
+
|
| 6 |
+
Thought: xxx → delta.content = "<think>xxx</think>" (流式)
|
| 7 |
+
Action: name → 缓存工具名
|
| 8 |
+
Action Input: {} → delta.tool_calls[0].function.arguments (流式)
|
| 9 |
+
Final Answer: xxx → delta.content = "xxx" (流式)
|
| 10 |
+
Observation: xxx → delta.content = "xxx" (流式)
|
| 11 |
+
无标记文本 → delta.content = "xxx" (直通)
|
| 12 |
+
|
| 13 |
+
核心设计:
|
| 14 |
+
MarkerDetector:默认零延迟直通,仅在遇到 Marker 首字母时暂存等待确认。
|
| 15 |
+
StateMachine:IDLE / IN_THOUGHT / IN_ACTION / IN_ACTION_INPUT /
|
| 16 |
+
IN_OBSERVATION / IN_FINAL
|
| 17 |
+
"""
|
| 18 |
+
|
| 19 |
+
import json
|
| 20 |
+
import uuid
|
| 21 |
+
from enum import Enum, auto
|
| 22 |
+
|
| 23 |
+
# ─── Marker definitions ──────────────────────────────────────────────────────

# Order note: only affects traversal during exact matching, not correctness
# (every marker is unique).
_MARKERS: tuple[str, ...] = (
    "Thought:",
    "Action Input:",  # must be defined before "Action:" (_is_prefix relies on the full set)
    "Action:",
    "Observation:",
    "Final Answer:",
    "最终答案:",
)

# First character of every marker — the cheap trigger for suspect buffering.
_MARKER_FIRST_CHARS: frozenset[str] = frozenset(m[0] for m in _MARKERS)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
# ─── State enum ──────────────────────────────────────────────────────────────


class _State(Enum):
    # States of the ReAct stream state machine (see module docstring).
    IDLE = auto()  # no section marker seen yet
    IN_THOUGHT = auto()  # inside "Thought:" content
    IN_ACTION = auto()  # after "Action:" — collecting the tool name
    IN_ACTION_INPUT = auto()  # after "Action Input:" — streaming arguments
    IN_OBSERVATION = auto()  # inside "Observation:" content
    IN_FINAL = auto()  # inside "Final Answer:" content
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
# ─── 解析器主体 ───────────────────────────────────────────────────────────────
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class ReactStreamParser:
|
| 54 |
+
"""
|
| 55 |
+
字符级 ReAct 流解析器,将 LLM 的 ReAct 格式输出转换为 OpenAI SSE chunks。
|
| 56 |
+
|
| 57 |
+
用法::
|
| 58 |
+
|
| 59 |
+
parser = ReactStreamParser(chat_id, model, created, has_tools=True)
|
| 60 |
+
async for chunk in llm_stream:
|
| 61 |
+
# 注意:不要对 chunk 做 strip_session_id_suffix,否则客户端收不到会话 ID,下一轮无法复用会话
|
| 62 |
+
for sse in parser.feed(chunk):
|
| 63 |
+
yield sse
|
| 64 |
+
for sse in parser.finish():
|
| 65 |
+
yield sse
|
| 66 |
+
"""
|
| 67 |
+
|
| 68 |
+
    def __init__(
        self,
        chat_id: str,
        model: str,
        created: int,
        *,
        has_tools: bool = True,
    ) -> None:
        # Identifiers echoed into every emitted OpenAI chunk.
        self._chat_id = chat_id
        self._model = model
        self._created = created
        self._has_tools = has_tools

        # MarkerDetector state
        self._suspect_buf = ""
        self._skip_leading_ws = False  # swallow whitespace after a marker's colon

        # StateMachine state
        self._state = _State.IDLE
        self._action_name_buf = ""  # accumulates the Action (tool) name
        self._tool_call_id = ""
        self._tool_call_index = 0

        # Output-control flags
        self._emitted_msg_start = False
        self._think_open = False  # <think> already emitted
        self._think_closed = False  # </think> already emitted
        self._tool_call_started = False  # function_call_start already emitted
|
| 96 |
+
|
| 97 |
+
# ── 公开 API ──────────────────────────────────────────────────────────────
|
| 98 |
+
|
| 99 |
+
    def feed(self, chunk: str) -> list[str]:
        """Consume one text chunk; return the SSE strings to emit (each
        already formatted as `data: ...\\n\\n`)."""
        events: list[str] = []
        for char in chunk:
            events.extend(self._on_char(char))
        return events
|
| 105 |
+
|
| 106 |
+
    def finish(self) -> list[str]:
        """Call when the LLM stream ends: flush any leftover suspect_buf and
        append the closing SSE events."""
        events: list[str] = []
        if self._suspect_buf:
            buf, self._suspect_buf = self._suspect_buf, ""
            events.extend(self._dispatch(buf))
        events.extend(self._emit_end())
        return events
|
| 114 |
+
|
| 115 |
+
# ── 字符级处理(MarkerDetector)──────────────────────────────────────────
|
| 116 |
+
|
| 117 |
+
def _on_char(self, char: str) -> list[str]:
|
| 118 |
+
# 吃掉 Marker 冒号后的单个/连续空格或制表符
|
| 119 |
+
if self._skip_leading_ws:
|
| 120 |
+
if char in (" ", "\t"):
|
| 121 |
+
return []
|
| 122 |
+
self._skip_leading_ws = False
|
| 123 |
+
|
| 124 |
+
# 无工具:全部直通为纯文本
|
| 125 |
+
if not self._has_tools:
|
| 126 |
+
return self._dispatch(char)
|
| 127 |
+
|
| 128 |
+
if not self._suspect_buf:
|
| 129 |
+
if char in _MARKER_FIRST_CHARS:
|
| 130 |
+
self._suspect_buf = char
|
| 131 |
+
return []
|
| 132 |
+
return self._dispatch(char)
|
| 133 |
+
|
| 134 |
+
# 正在疑似 Marker
|
| 135 |
+
self._suspect_buf += char
|
| 136 |
+
|
| 137 |
+
matched = self._exact_match()
|
| 138 |
+
if matched:
|
| 139 |
+
events = self._on_marker(matched)
|
| 140 |
+
self._suspect_buf = ""
|
| 141 |
+
return events
|
| 142 |
+
|
| 143 |
+
if self._is_prefix():
|
| 144 |
+
return [] # 继续积累,等待确认
|
| 145 |
+
|
| 146 |
+
# 排除歧义:flush suspect_buf 作为普通内容
|
| 147 |
+
buf, self._suspect_buf = self._suspect_buf, ""
|
| 148 |
+
return self._dispatch(buf)
|
| 149 |
+
|
| 150 |
+
def _exact_match(self) -> str | None:
|
| 151 |
+
for m in _MARKERS:
|
| 152 |
+
if self._suspect_buf == m:
|
| 153 |
+
return m
|
| 154 |
+
return None
|
| 155 |
+
|
| 156 |
+
def _is_prefix(self) -> bool:
|
| 157 |
+
return any(m.startswith(self._suspect_buf) for m in _MARKERS)
|
| 158 |
+
|
| 159 |
+
# ── Marker 触发(状态转换)────────────────────────────────────────────────
|
| 160 |
+
|
| 161 |
+
    def _on_marker(self, marker: str) -> list[str]:
        """Handle a confirmed ReAct marker: close out the previous state, then enter the new one."""
        events: list[str] = []
        # Exit actions (e.g. closing an open <think>) must run before the transition.
        events.extend(self._exit_state())

        if marker == "Thought:":
            self._state = _State.IN_THOUGHT
            events.extend(self._enter_thought())

        elif marker == "Action:":
            self._state = _State.IN_ACTION
            self._action_name_buf = ""

        elif marker == "Action Input:":
            # Fallback: if no "\n" followed the Action name (rare), trigger
            # function_call_start here instead.
            if not self._tool_call_started:
                events.extend(self._start_function_call())
            self._state = _State.IN_ACTION_INPUT

        elif marker == "Observation:":
            self._state = _State.IN_OBSERVATION

        elif marker in ("Final Answer:", "最终答案:"):
            self._state = _State.IN_FINAL
            events.extend(self._enter_final())

        self._skip_leading_ws = True  # skip the whitespace after the marker colon
        return events
|
| 188 |
+
|
| 189 |
+
def _exit_state(self) -> list[str]:
|
| 190 |
+
"""离开当前状态时的收尾动作。"""
|
| 191 |
+
events: list[str] = []
|
| 192 |
+
if self._state == _State.IN_THOUGHT:
|
| 193 |
+
if self._think_open and not self._think_closed:
|
| 194 |
+
self._think_closed = True
|
| 195 |
+
events.extend(self._make_content("</think>"))
|
| 196 |
+
return events
|
| 197 |
+
|
| 198 |
+
# ── 状态进入 ──────────────────────────────────────────────────────────────
|
| 199 |
+
|
| 200 |
+
    def _enter_thought(self) -> list[str]:
        """Open a new ``<think>`` block, emitting the message-start frame first if needed."""
        events: list[str] = []
        if not self._emitted_msg_start:
            events.extend(self._emit_msg_start())
        # Every entry into IN_THOUGHT opens a fresh <think> block (supports multiple rounds).
        self._think_open = True
        self._think_closed = False
        events.extend(self._make_content("<think>"))
        return events
|
| 209 |
+
|
| 210 |
+
def _enter_final(self) -> list[str]:
|
| 211 |
+
events: list[str] = []
|
| 212 |
+
if not self._emitted_msg_start:
|
| 213 |
+
events.extend(self._emit_msg_start())
|
| 214 |
+
return events
|
| 215 |
+
|
| 216 |
+
    def _start_function_call(self) -> list[str]:
        """The Action name is fully collected: emit function_call_start."""
        name = self._action_name_buf.strip()
        # Fresh short id per tool call, OpenAI "call_xxxxxxxx" style.
        self._tool_call_id = f"call_{uuid.uuid4().hex[:8]}"
        self._tool_call_started = True
        events: list[str] = []
        if not self._emitted_msg_start:
            events.extend(self._emit_msg_start())
        events.extend(self._make_tool_call_start(name))
        return events
|
| 226 |
+
|
| 227 |
+
# ── 内容分发(根据当前状态路由字符/字符串)──────────────────────────────────
|
| 228 |
+
|
| 229 |
+
    def _dispatch(self, text: str) -> list[str]:
        """Route ``text`` to the output action appropriate for the current state."""
        s = self._state
        events: list[str] = []

        if s == _State.IDLE:
            # Content before any marker: plain text.
            if not self._emitted_msg_start:
                events.extend(self._emit_msg_start())
            events.extend(self._make_content(text))

        elif s == _State.IN_THOUGHT:
            if not self._think_open:
                # Defensive: _enter_thought normally ran when IN_THOUGHT was entered.
                events.extend(self._enter_thought())
            events.extend(self._make_content(text))

        elif s == _State.IN_ACTION:
            # Collect the action name char by char; a newline triggers function_call_start.
            for ch in text:
                if ch == "\n":
                    if self._action_name_buf.strip() and not self._tool_call_started:
                        events.extend(self._start_function_call())
                else:
                    self._action_name_buf += ch

        elif s == _State.IN_ACTION_INPUT:
            # Arguments are only meaningful once function_call_start went out.
            if self._tool_call_started:
                events.extend(self._make_tool_args(text))

        elif s == _State.IN_OBSERVATION:
            # Observation content streams out as plain text.
            if not self._emitted_msg_start:
                events.extend(self._emit_msg_start())
            events.extend(self._make_content(text))

        elif s == _State.IN_FINAL:
            events.extend(self._make_content(text))

        return events
|
| 268 |
+
|
| 269 |
+
# ── 流结束 ────────────────────────────────────────────────────────────────
|
| 270 |
+
|
| 271 |
+
def _emit_end(self) -> list[str]:
|
| 272 |
+
events: list[str] = []
|
| 273 |
+
|
| 274 |
+
# 关闭未关闭的 <think>
|
| 275 |
+
if self._think_open and not self._think_closed:
|
| 276 |
+
self._think_closed = True
|
| 277 |
+
events.extend(self._make_content("</think>"))
|
| 278 |
+
|
| 279 |
+
if self._tool_call_started:
|
| 280 |
+
events.extend(self._make_tool_calls_finish())
|
| 281 |
+
elif self._emitted_msg_start:
|
| 282 |
+
events.extend(self._make_stop())
|
| 283 |
+
else:
|
| 284 |
+
# 空响应:补齐最小合法 SSE 序列
|
| 285 |
+
events.extend(self._emit_msg_start())
|
| 286 |
+
events.extend(self._make_stop())
|
| 287 |
+
|
| 288 |
+
events.append("data: [DONE]\n\n")
|
| 289 |
+
return events
|
| 290 |
+
|
| 291 |
+
# ── SSE chunk 构造 ─────────────────────────────────────────────────────────
|
| 292 |
+
|
| 293 |
+
def _emit_msg_start(self) -> list[str]:
|
| 294 |
+
"""发送 role:assistant + content:"" 的首帧。"""
|
| 295 |
+
self._emitted_msg_start = True
|
| 296 |
+
return [
|
| 297 |
+
self._sse(
|
| 298 |
+
{
|
| 299 |
+
"id": self._chat_id,
|
| 300 |
+
"object": "chat.completion.chunk",
|
| 301 |
+
"created": self._created,
|
| 302 |
+
"model": self._model,
|
| 303 |
+
"choices": [
|
| 304 |
+
{
|
| 305 |
+
"index": 0,
|
| 306 |
+
"delta": {"role": "assistant", "content": ""},
|
| 307 |
+
"logprobs": None,
|
| 308 |
+
"finish_reason": None,
|
| 309 |
+
}
|
| 310 |
+
],
|
| 311 |
+
}
|
| 312 |
+
)
|
| 313 |
+
]
|
| 314 |
+
|
| 315 |
+
def _make_content(self, text: str) -> list[str]:
|
| 316 |
+
return [
|
| 317 |
+
self._sse(
|
| 318 |
+
{
|
| 319 |
+
"id": self._chat_id,
|
| 320 |
+
"object": "chat.completion.chunk",
|
| 321 |
+
"created": self._created,
|
| 322 |
+
"model": self._model,
|
| 323 |
+
"choices": [
|
| 324 |
+
{
|
| 325 |
+
"index": 0,
|
| 326 |
+
"delta": {"content": text},
|
| 327 |
+
"logprobs": None,
|
| 328 |
+
"finish_reason": None,
|
| 329 |
+
}
|
| 330 |
+
],
|
| 331 |
+
}
|
| 332 |
+
)
|
| 333 |
+
]
|
| 334 |
+
|
| 335 |
+
def _make_tool_call_start(self, name: str) -> list[str]:
|
| 336 |
+
"""发送 function_call_start:携带 id、type、name 和空 arguments。"""
|
| 337 |
+
return [
|
| 338 |
+
self._sse(
|
| 339 |
+
{
|
| 340 |
+
"id": self._chat_id,
|
| 341 |
+
"object": "chat.completion.chunk",
|
| 342 |
+
"created": self._created,
|
| 343 |
+
"model": self._model,
|
| 344 |
+
"choices": [
|
| 345 |
+
{
|
| 346 |
+
"index": 0,
|
| 347 |
+
"delta": {
|
| 348 |
+
"tool_calls": [
|
| 349 |
+
{
|
| 350 |
+
"index": self._tool_call_index,
|
| 351 |
+
"id": self._tool_call_id,
|
| 352 |
+
"type": "function",
|
| 353 |
+
"function": {"name": name, "arguments": ""},
|
| 354 |
+
}
|
| 355 |
+
]
|
| 356 |
+
},
|
| 357 |
+
"logprobs": None,
|
| 358 |
+
"finish_reason": None,
|
| 359 |
+
}
|
| 360 |
+
],
|
| 361 |
+
}
|
| 362 |
+
)
|
| 363 |
+
]
|
| 364 |
+
|
| 365 |
+
def _make_tool_args(self, delta: str) -> list[str]:
|
| 366 |
+
"""逐字发送 arguments 增量。"""
|
| 367 |
+
return [
|
| 368 |
+
self._sse(
|
| 369 |
+
{
|
| 370 |
+
"id": self._chat_id,
|
| 371 |
+
"object": "chat.completion.chunk",
|
| 372 |
+
"created": self._created,
|
| 373 |
+
"model": self._model,
|
| 374 |
+
"choices": [
|
| 375 |
+
{
|
| 376 |
+
"index": 0,
|
| 377 |
+
"delta": {
|
| 378 |
+
"tool_calls": [
|
| 379 |
+
{
|
| 380 |
+
"index": self._tool_call_index,
|
| 381 |
+
"function": {"arguments": delta},
|
| 382 |
+
}
|
| 383 |
+
]
|
| 384 |
+
},
|
| 385 |
+
"logprobs": None,
|
| 386 |
+
"finish_reason": None,
|
| 387 |
+
}
|
| 388 |
+
],
|
| 389 |
+
}
|
| 390 |
+
)
|
| 391 |
+
]
|
| 392 |
+
|
| 393 |
+
def _make_tool_calls_finish(self) -> list[str]:
|
| 394 |
+
return [
|
| 395 |
+
self._sse(
|
| 396 |
+
{
|
| 397 |
+
"id": self._chat_id,
|
| 398 |
+
"object": "chat.completion.chunk",
|
| 399 |
+
"created": self._created,
|
| 400 |
+
"model": self._model,
|
| 401 |
+
"choices": [
|
| 402 |
+
{
|
| 403 |
+
"index": 0,
|
| 404 |
+
"delta": {},
|
| 405 |
+
"logprobs": None,
|
| 406 |
+
"finish_reason": "tool_calls",
|
| 407 |
+
}
|
| 408 |
+
],
|
| 409 |
+
}
|
| 410 |
+
)
|
| 411 |
+
]
|
| 412 |
+
|
| 413 |
+
def _make_stop(self) -> list[str]:
|
| 414 |
+
return [
|
| 415 |
+
self._sse(
|
| 416 |
+
{
|
| 417 |
+
"id": self._chat_id,
|
| 418 |
+
"object": "chat.completion.chunk",
|
| 419 |
+
"created": self._created,
|
| 420 |
+
"model": self._model,
|
| 421 |
+
"choices": [
|
| 422 |
+
{
|
| 423 |
+
"index": 0,
|
| 424 |
+
"delta": {},
|
| 425 |
+
"logprobs": None,
|
| 426 |
+
"finish_reason": "stop",
|
| 427 |
+
}
|
| 428 |
+
],
|
| 429 |
+
}
|
| 430 |
+
)
|
| 431 |
+
]
|
| 432 |
+
|
| 433 |
+
@staticmethod
|
| 434 |
+
def _sse(obj: dict) -> str:
|
| 435 |
+
return f"data: {json.dumps(obj, ensure_ascii=False)}\n\n"
|
core/api/routes.py
ADDED
|
@@ -0,0 +1,177 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
OpenAI 协议路由。
|
| 3 |
+
|
| 4 |
+
支持:
|
| 5 |
+
- /openai/{provider}/v1/chat/completions
|
| 6 |
+
- /openai/{provider}/v1/models
|
| 7 |
+
- 旧路径 /{provider}/v1/...(等价于 OpenAI 协议)
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import json
|
| 11 |
+
import time
|
| 12 |
+
from collections.abc import AsyncIterator
|
| 13 |
+
from typing import Any
|
| 14 |
+
|
| 15 |
+
from fastapi import APIRouter, Depends, HTTPException, Request
|
| 16 |
+
from fastapi.responses import JSONResponse, StreamingResponse
|
| 17 |
+
|
| 18 |
+
from core.api.auth import require_api_key
|
| 19 |
+
from core.api.chat_handler import ChatHandler
|
| 20 |
+
from core.config.repository import APP_SETTING_ENABLE_PRO_MODELS
|
| 21 |
+
from core.plugin.base import PluginRegistry
|
| 22 |
+
from core.protocol.openai import OpenAIProtocolAdapter
|
| 23 |
+
from core.protocol.schemas import CanonicalChatRequest
|
| 24 |
+
from core.protocol.service import CanonicalChatService
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def get_chat_handler(request: Request) -> ChatHandler:
    """Resolve the shared ChatHandler from application state.

    Raises HTTPException 503 while the service has not finished starting up.
    """
    state = request.app.state
    handler = getattr(state, "chat_handler", None)
    if handler is not None:
        return handler
    raise HTTPException(status_code=503, detail="服务未就绪")
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def resolve_request_model(
    provider: str,
    canonical_req: CanonicalChatRequest,
) -> CanonicalChatRequest:
    """Resolve the requested model via the plugin registry (mutates in place).

    Normalizes ``canonical_req.model`` to the public model id and records the
    upstream model id in ``metadata`` for the handler; returns the same object.
    """
    resolved = PluginRegistry.resolve_model(provider, canonical_req.model)
    canonical_req.model = resolved.public_model
    canonical_req.metadata["upstream_model"] = resolved.upstream_model
    return canonical_req
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def check_pro_model_access(
    request: Request,
    provider: str,
    model: str,
) -> JSONResponse | None:
    """Return 403 JSONResponse if model requires Pro and Pro is disabled, else None."""
    plugin = PluginRegistry.get(provider)
    if plugin is None:
        # Unknown provider: let downstream handling produce the real error.
        return None
    pro_models = getattr(plugin, "PRO_MODELS", frozenset())
    if model not in pro_models:
        return None
    config_repo = getattr(request.app.state, "config_repo", None)
    if config_repo is None:
        # No config repository available (e.g. partial startup): fail open.
        return None
    enabled = config_repo.get_app_setting(APP_SETTING_ENABLE_PRO_MODELS)
    # The setting is stored as the string "true"; anything else disables Pro.
    if enabled == "true":
        return None
    return JSONResponse(
        status_code=403,
        content={
            "error": {
                "message": (
                    f"Model '{model}' requires a Claude Pro subscription. "
                    "Enable Pro models in the config page at /config."
                ),
                "type": "model_not_available",
                "code": "pro_model_required",
            }
        },
    )
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def create_router() -> APIRouter:
    """Create the OpenAI-protocol router.

    Every route requires an API key; request/response translation is delegated
    to ``OpenAIProtocolAdapter`` and execution to ``CanonicalChatService``.
    Both ``/openai/{provider}/v1/...`` and the legacy ``/{provider}/v1/...``
    paths are registered with identical behavior.
    """
    router = APIRouter(dependencies=[Depends(require_api_key)])
    adapter = OpenAIProtocolAdapter()

    def _list_models(provider: str) -> dict[str, Any]:
        # Unknown provider -> 404 (PluginRegistry raises ValueError).
        try:
            metadata = PluginRegistry.model_metadata(provider)
        except ValueError as exc:
            raise HTTPException(status_code=404, detail=str(exc)) from exc
        now = int(time.time())
        return {
            "object": "list",
            "data": [
                {
                    "id": mid,
                    "object": "model",
                    "created": now,
                    "owned_by": provider,
                }
                for mid in metadata["public_models"]
            ],
        }

    @router.get("/openai/{provider}/v1/models")
    def list_models(provider: str) -> dict[str, Any]:
        return _list_models(provider)

    @router.get("/{provider}/v1/models")
    def list_models_legacy(provider: str) -> dict[str, Any]:
        return _list_models(provider)

    async def _chat_completions(
        provider: str,
        request: Request,
        handler: ChatHandler,
    ) -> Any:
        # Parse + model resolution; adapter maps parse failures to an
        # OpenAI-style error payload with the right HTTP status.
        raw_body = await request.json()
        try:
            canonical_req = resolve_request_model(
                provider,
                adapter.parse_request(provider, raw_body),
            )
        except Exception as exc:
            status, payload = adapter.render_error(exc)
            return JSONResponse(status_code=status, content=payload)

        pro_err = check_pro_model_access(request, provider, canonical_req.model)
        if pro_err is not None:
            return pro_err

        service = CanonicalChatService(handler)
        if canonical_req.stream:

            async def sse_stream() -> AsyncIterator[str]:
                try:
                    async for event in adapter.render_stream(
                        canonical_req,
                        service.stream_raw(canonical_req),
                    ):
                        yield event
                except Exception as exc:
                    # Headers are already sent mid-stream, so the status code
                    # cannot change; surface the error as an SSE data frame.
                    status, payload = adapter.render_error(exc)
                    del status
                    yield f"data: {json.dumps(payload, ensure_ascii=False)}\n\n"

            return StreamingResponse(
                sse_stream(),
                media_type="text/event-stream",
                headers={
                    "Cache-Control": "no-cache",
                    "Connection": "keep-alive",
                    "X-Accel-Buffering": "no",
                },
            )

        # Non-streaming: collect the whole raw event list, then render once.
        try:
            raw_events = await service.collect_raw(canonical_req)
            return adapter.render_non_stream(canonical_req, raw_events)
        except Exception as exc:
            status, payload = adapter.render_error(exc)
            return JSONResponse(status_code=status, content=payload)

    @router.post("/openai/{provider}/v1/chat/completions")
    async def chat_completions(
        provider: str,
        request: Request,
        handler: ChatHandler = Depends(get_chat_handler),
    ) -> Any:
        return await _chat_completions(provider, request, handler)

    @router.post("/{provider}/v1/chat/completions")
    async def chat_completions_legacy(
        provider: str,
        request: Request,
        handler: ChatHandler = Depends(get_chat_handler),
    ) -> Any:
        return await _chat_completions(provider, request, handler)

    return router
|
core/api/schemas.py
ADDED
|
@@ -0,0 +1,168 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""OpenAI 兼容的请求/响应模型。"""
|
| 2 |
+
|
| 3 |
+
from typing import Any
|
| 4 |
+
|
| 5 |
+
from pydantic import BaseModel, Field
|
| 6 |
+
|
| 7 |
+
from core.api.conv_parser import strip_session_id_suffix
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class OpenAIContentPart(BaseModel):
    """One element of a structured ``content`` list (OpenAI content-part format)."""

    type: str
    text: str | None = None
    image_url: dict[str, Any] | str | None = None
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class InputAttachment(BaseModel):
    """A binary attachment extracted from a user message, to be uploaded upstream."""

    filename: str
    mime_type: str
    data: bytes
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class OpenAIMessage(BaseModel):
    """A single chat message in OpenAI Chat Completions format."""

    role: str = Field(..., description="system | user | assistant | tool")
    content: str | list[OpenAIContentPart] | None = ""
    tool_calls: list[dict[str, Any]] | None = Field(
        default=None, description="assistant 发起的工具调用"
    )
    tool_call_id: str | None = Field(
        default=None, description="tool 消息对应的 call id"
    )

    # Tolerate unknown extra keys (e.g. "name") instead of failing validation.
    model_config = {"extra": "allow"}
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class OpenAIChatRequest(BaseModel):
    """OpenAI Chat Completions API 兼容请求体。

    Fields marked ``exclude=True`` are internal-only scheduling state filled by
    the server (never serialized back to clients).
    """

    model: str = Field(default="", description="模型名,可忽略")
    messages: list[OpenAIMessage] = Field(..., description="对话列表")
    stream: bool = Field(default=False, description="是否流式返回")
    tools: list[dict] | None = Field(
        default=None,
        description='工具列表,每项为 {"type":"function","function":{name,description,parameters,strict?}}',
    )
    tool_choice: str | dict | None = Field(
        default=None,
        description='工具选择: "auto"|"required"|"none" 或 {"type":"function","name":"xxx"}',
    )
    parallel_tool_calls: bool | None = Field(
        default=None,
        description="是否允许单次响应中并行多个 tool_call,false 时仅 0 或 1 个",
    )
    # Internal: session to resume, chosen by the conversation index.
    resume_session_id: str | None = Field(default=None, exclude=True)
    # Internal: upstream model id resolved by the plugin registry.
    upstream_model: str | None = Field(default=None, exclude=True)
    attachment_files: list[InputAttachment] = Field(
        default_factory=list,
        exclude=True,
        description="本次实际要发送给站点的附件,由 ChatHandler 根据 full_history 选择来源填充。",
    )
    # 仅供内部调度使用:最后一条 user 消息里的附件 & 所有 user 消息里的附件
    attachment_files_last_user: list[InputAttachment] = Field(
        default_factory=list, exclude=True
    )
    attachment_files_all_users: list[InputAttachment] = Field(
        default_factory=list, exclude=True
    )
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def _norm_content(c: str | list[OpenAIContentPart] | None) -> str:
    """Collapse a message ``content`` into a single string.

    Only the official shapes are supported: a plain string, or a list of
    content parts whose ``type == "text"`` (their ``text`` fields are joined
    with single spaces). Any other shape normalizes to "". The trailing
    session-id suffix, if present, is stripped in both cases.
    """
    if c is None:
        return ""
    if isinstance(c, str):
        return strip_session_id_suffix(c)
    if not isinstance(c, list):
        return ""
    texts = [
        part.text
        for part in c
        if isinstance(part, OpenAIContentPart) and part.type == "text" and part.text
    ]
    return strip_session_id_suffix(" ".join(texts))
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
# Appended to every user turn in ReAct mode to keep the model strictly in
# tool-calling behavior. The string itself is part of the prompt protocol —
# do not translate or reword it.
REACT_STRICT_SUFFIX = (
    "(严格 ReAct 执行模式;禁止输出「无法执行工具所以直接给方案」等解释或替代内容)"
)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def extract_user_content(
    messages: list[OpenAIMessage],
    *,
    has_tools: bool = False,
    react_prompt_prefix: str = "",
    full_history: bool = False,
) -> str:
    """Build the prompt text sent to the model from an OpenAI message list.

    The web session already holds the full history, so normally only the tail
    is sent: if the last message is ``user``, take everything after the last
    ``assistant`` (exclusive); if it is ``tool``, take everything after the
    last ``user`` (exclusive). Supports user/assistant/tool roles; assistant
    ``tool_calls`` and tool results are rendered back as ReAct text.
    ReAct mode: the full ReAct prompt is injected only on the first turn
    (judged from the complete ``messages``), or whenever ``full_history``
    replays the whole conversation into a fresh session.
    """
    if not messages:
        return ""

    parts: list[str] = []

    # Session rebuilds replay the full history back to the site, so the tools
    # instruction has to be re-injected as well.
    is_first_turn = not any(m.role in ("assistant", "tool") for m in messages)
    if has_tools and react_prompt_prefix and (full_history or is_first_turn):
        parts.append(react_prompt_prefix)

    if full_history:
        tail = messages
    else:
        last = messages[-1]
        if last.role == "user":
            # Walk back to the most recent assistant message (exclusive).
            i = len(messages) - 1
            while i >= 0 and messages[i].role != "assistant":
                i -= 1
            tail = messages[i + 1 :]
        elif last.role == "tool":
            # Walk back to the most recent user message (exclusive).
            i = len(messages) - 1
            while i >= 0 and messages[i].role != "user":
                i -= 1
            tail = messages[i + 1 :]
        else:
            # Unexpected trailing role: fall back to the last two messages.
            tail = messages[-2:]

    for m in tail:
        if m.role == "system":
            txt = _norm_content(m.content)
            if txt:
                parts.append(f"System:{txt}")
        elif m.role == "user":
            txt = _norm_content(m.content)
            if txt:
                if has_tools:
                    parts.append(f"**User**: {txt} {REACT_STRICT_SUFFIX}")
                else:
                    parts.append(f"User:{txt}")
        elif m.role == "assistant":
            tool_calls_list = list(m.tool_calls or [])
            if tool_calls_list:
                # Render each tool call back into ReAct Action syntax.
                for tc in tool_calls_list:
                    fn = tc.get("function") or {}
                    call_id = tc.get("id", "")
                    name = fn.get("name", "")
                    args = fn.get("arguments", "{}")
                    parts.append(
                        f"**Assistant**:\n\n```\nAction: {name}\nAction Input: {args}\nCall ID: {call_id}\n```"
                    )
            else:
                txt = _norm_content(m.content)
                if txt:
                    if has_tools:
                        parts.append(f"**Assistant**:\n\n{txt}")
                    else:
                        parts.append(f"Assistant:{txt}")
        elif m.role == "tool":
            txt = _norm_content(m.content)
            call_id = m.tool_call_id or ""
            parts.append(
                f"**Observation(Call ID: {call_id})**: {txt}\n\n请根据以上观察结果继续。如需调用工具,输出 Thought / Action / Action Input;若任务已完成,输出 Final Answer。"
            )
    return "\n".join(parts)
|
core/app.py
ADDED
|
@@ -0,0 +1,166 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FastAPI 应用组装:配置加载、账号池、会话缓存、浏览器管理、插件注册、路由挂载。
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import asyncio
|
| 6 |
+
import logging
|
| 7 |
+
from contextlib import asynccontextmanager
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
from typing import AsyncIterator
|
| 10 |
+
|
| 11 |
+
from fastapi import FastAPI, Request
|
| 12 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 13 |
+
from fastapi.responses import FileResponse, JSONResponse
|
| 14 |
+
|
| 15 |
+
from core.account.pool import AccountPool
|
| 16 |
+
from core.api.auth import (
|
| 17 |
+
AdminLoginAttemptStore,
|
| 18 |
+
AdminSessionStore,
|
| 19 |
+
configured_config_login_lock_seconds,
|
| 20 |
+
configured_config_login_max_failures,
|
| 21 |
+
config_login_enabled,
|
| 22 |
+
ensure_config_secret_hashed,
|
| 23 |
+
refresh_runtime_auth_settings,
|
| 24 |
+
)
|
| 25 |
+
from core.api.anthropic_routes import create_anthropic_router
|
| 26 |
+
from core.api.chat_handler import ChatHandler
|
| 27 |
+
from core.api.config_routes import create_config_router
|
| 28 |
+
from core.api.routes import create_router
|
| 29 |
+
from core.config.repository import create_config_repository
|
| 30 |
+
from core.config.settings import get, get_bool
|
| 31 |
+
from core.constants import CDP_PORT_RANGE, CHROMIUM_BIN
|
| 32 |
+
from core.plugin.base import PluginRegistry
|
| 33 |
+
from core.plugin.claude import register_claude_plugin
|
| 34 |
+
from core.runtime.browser_manager import BrowserManager
|
| 35 |
+
from core.runtime.session_cache import SessionCache
|
| 36 |
+
|
| 37 |
+
logger = logging.getLogger(__name__)
|
| 38 |
+
STATIC_DIR = Path(__file__).resolve().parent / "static"
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncIterator[None]:
    """Startup: initialize config and ChatHandler; shutdown: no persistence (session cache is in-process)."""
    # Register plugins first so PluginRegistry is populated before anything
    # else reads from it.
    register_claude_plugin()

    repo = create_config_repository()
    repo.init_schema()
    ensure_config_secret_hashed(repo)
    app.state.config_repo = repo
    auth_settings = refresh_runtime_auth_settings(app)
    groups = repo.load_groups()

    # Browser/CDP settings, each falling back to a built-in default when unset.
    chromium_bin = (get("browser", "chromium_bin") or "").strip() or CHROMIUM_BIN
    headless = get_bool("browser", "headless", False)
    no_sandbox = get_bool("browser", "no_sandbox", False)
    disable_gpu = get_bool("browser", "disable_gpu", False)
    disable_gpu_sandbox = get_bool("browser", "disable_gpu_sandbox", False)
    cdp_wait_max_attempts = int(get("browser", "cdp_wait_max_attempts") or 90)
    cdp_wait_interval_seconds = float(
        get("browser", "cdp_wait_interval_seconds") or 2.0
    )
    cdp_wait_connect_timeout_seconds = float(
        get("browser", "cdp_wait_connect_timeout_seconds") or 2.0
    )
    port_start = int(get("browser", "cdp_port_start") or 9223)
    port_count = int(get("browser", "cdp_port_count") or 20)
    # Configured contiguous port range, or the constant fallback range.
    port_range = (
        list(range(port_start, port_start + port_count))
        if port_count > 0
        else list(CDP_PORT_RANGE)
    )
    api_keys = auth_settings.api_keys
    pool = AccountPool.from_groups(groups)
    session_cache = SessionCache()
    browser_manager = BrowserManager(
        chromium_bin=chromium_bin,
        headless=headless,
        no_sandbox=no_sandbox,
        disable_gpu=disable_gpu,
        disable_gpu_sandbox=disable_gpu_sandbox,
        port_range=port_range,
        cdp_wait_max_attempts=cdp_wait_max_attempts,
        cdp_wait_interval_seconds=cdp_wait_interval_seconds,
        cdp_wait_connect_timeout_seconds=cdp_wait_connect_timeout_seconds,
    )
    app.state.chat_handler = ChatHandler(
        pool=pool,
        session_cache=session_cache,
        browser_manager=browser_manager,
        config_repo=repo,
    )
    app.state.session_cache = session_cache
    app.state.browser_manager = browser_manager
    app.state.admin_sessions = AdminSessionStore()
    app.state.admin_login_attempts = AdminLoginAttemptStore(
        max_failures=configured_config_login_max_failures(),
        lock_seconds=configured_config_login_lock_seconds(),
    )
    if not groups:
        logger.warning("数据库无配置,服务已启动但当前无可用账号")
    if api_keys:
        logger.info("API 鉴权已启用,已加载 %d 个 API Key", len(api_keys))
    if auth_settings.config_login_enabled:
        logger.info(
            "配置页登录已启用,失败 %d 次锁定 %d 秒",
            app.state.admin_login_attempts.max_failures,
            app.state.admin_login_attempts.lock_seconds,
        )
    # Best-effort prewarm; a failure here must not block startup.
    try:
        await app.state.chat_handler.prewarm_resident_browsers()
    except Exception:
        logger.exception("启动预热浏览器失败")
    app.state.maintenance_task = asyncio.create_task(
        app.state.chat_handler.run_maintenance_loop()
    )
    logger.info("服务已就绪,已注册 type: %s", ", ".join(PluginRegistry.all_types()))
    yield
    # Shutdown: stop the handler first, then await the maintenance task
    # (swallowing its expected cancellation).
    task = getattr(app.state, "maintenance_task", None)
    handler = getattr(app.state, "chat_handler", None)
    if handler is not None:
        await handler.shutdown()
    if task is not None:
        try:
            await task
        except asyncio.CancelledError:
            pass
    app.state.chat_handler = None
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def create_app() -> FastAPI:
    """Assemble the FastAPI application: CORS, static landing page, health
    probe and the OpenAI / Anthropic / config routers."""
    application = FastAPI(
        title="Web2API(Plugin)",
        description="按 type 路由的 OpenAI 兼容接口,baseUrl: http://ip:port/{type}/v1/...",
        lifespan=lifespan,
    )
    # Fully open CORS without credentials — auth is API-key based.
    application.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=False,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    @application.get("/", include_in_schema=False)
    def root() -> FileResponse:
        """Serve the static landing page."""
        return FileResponse(STATIC_DIR / "index.html")

    @application.get("/healthz", include_in_schema=False)
    def healthz(request: Request) -> JSONResponse:
        """Liveness probe; also advertises the login/config page paths."""
        payload = {
            "status": "ok",
            "config_login_enabled": config_login_enabled(request),
            "login": "/login",
            "config": "/config",
        }
        return JSONResponse(payload)

    # Register business routers: OpenAI-compatible, Anthropic-compatible, config UI.
    for router_factory in (create_router, create_anthropic_router, create_config_router):
        application.include_router(router_factory())
    return application


app = create_app()
|
core/config/__init__.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""配置层 数据模型与持久化(独立 DB,不修改现有 config_db)。"""
|
| 2 |
+
|
| 3 |
+
from core.config.schema import AccountConfig, ProxyGroupConfig
|
| 4 |
+
from core.config.repository import ConfigRepository
|
| 5 |
+
|
| 6 |
+
__all__ = [
|
| 7 |
+
"AccountConfig",
|
| 8 |
+
"ProxyGroupConfig",
|
| 9 |
+
"ConfigRepository",
|
| 10 |
+
]
|
core/config/repository.py
ADDED
|
@@ -0,0 +1,593 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
配置持久化:默认使用 SQLite;提供 DATABASE_URL / WEB2API_DATABASE_URL 时切换到 PostgreSQL。
|
| 3 |
+
表结构:proxy_group, account(含 name, type, auth JSON),以及 app_setting。
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from __future__ import annotations
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
import sqlite3
|
| 10 |
+
from pathlib import Path
|
| 11 |
+
from typing import Any
|
| 12 |
+
|
| 13 |
+
from core.config.schema import AccountConfig, ProxyGroupConfig, account_from_row
|
| 14 |
+
from core.config.settings import coerce_bool, get_database_url
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
# Default SQLite file name; _get_db_path() places it three directories above
# this module (the project root) unless WEB2API_DB_PATH overrides it.
DB_FILENAME = "db.sqlite3"
DB_PATH_ENV_KEY = "WEB2API_DB_PATH"
# Well-known keys stored in the app_setting table.
APP_SETTING_AUTH_API_KEY = "auth.api_key"
APP_SETTING_AUTH_CONFIG_SECRET_HASH = "auth.config_secret_hash"
APP_SETTING_ENABLE_PRO_MODELS = "claude.enable_pro_models"
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def _get_db_path() -> Path:
    """Resolve the SQLite file location: env override first, project root otherwise."""
    override = os.environ.get(DB_PATH_ENV_KEY, "").strip()
    if override:
        return Path(override).expanduser()
    # core/config/repository.py -> three parents up = project root.
    return Path(__file__).resolve().parent.parent.parent / DB_FILENAME
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def create_config_repository(
    db_path: Path | None = None,
    database_url: str | None = None,
) -> "ConfigRepository":
    """Build a ConfigRepository: PostgreSQL when a database URL is configured,
    SQLite otherwise."""
    if database_url is None:
        url = get_database_url().strip()
    else:
        url = database_url.strip()
    if url:
        backend = _PostgresConfigRepository(url)
    else:
        backend = _SqliteConfigRepository(db_path or _get_db_path())
    return ConfigRepository(backend)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
class _RepositoryBase:
|
| 47 |
+
def init_schema(self) -> None:
|
| 48 |
+
raise NotImplementedError
|
| 49 |
+
|
| 50 |
+
def load_groups(self) -> list[ProxyGroupConfig]:
|
| 51 |
+
raise NotImplementedError
|
| 52 |
+
|
| 53 |
+
def save_groups(self, groups: list[ProxyGroupConfig]) -> None:
|
| 54 |
+
raise NotImplementedError
|
| 55 |
+
|
| 56 |
+
def update_account_unfreeze_at(
|
| 57 |
+
self,
|
| 58 |
+
fingerprint_id: str,
|
| 59 |
+
account_name: str,
|
| 60 |
+
unfreeze_at: int | None,
|
| 61 |
+
) -> None:
|
| 62 |
+
raise NotImplementedError
|
| 63 |
+
|
| 64 |
+
def load_raw(self) -> list[dict[str, Any]]:
|
| 65 |
+
"""与前端/API 一致的原始列表格式。"""
|
| 66 |
+
groups = self.load_groups()
|
| 67 |
+
return [
|
| 68 |
+
{
|
| 69 |
+
"proxy_host": g.proxy_host,
|
| 70 |
+
"proxy_user": g.proxy_user,
|
| 71 |
+
"proxy_pass": g.proxy_pass,
|
| 72 |
+
"fingerprint_id": g.fingerprint_id,
|
| 73 |
+
"use_proxy": g.use_proxy,
|
| 74 |
+
"timezone": g.timezone,
|
| 75 |
+
"accounts": [
|
| 76 |
+
{
|
| 77 |
+
"name": a.name,
|
| 78 |
+
"type": a.type,
|
| 79 |
+
"auth": a.auth,
|
| 80 |
+
"enabled": a.enabled,
|
| 81 |
+
"unfreeze_at": a.unfreeze_at,
|
| 82 |
+
}
|
| 83 |
+
for a in g.accounts
|
| 84 |
+
],
|
| 85 |
+
}
|
| 86 |
+
for g in groups
|
| 87 |
+
]
|
| 88 |
+
|
| 89 |
+
def load_app_settings(self) -> dict[str, str]:
|
| 90 |
+
raise NotImplementedError
|
| 91 |
+
|
| 92 |
+
def get_app_setting(self, key: str) -> str | None:
|
| 93 |
+
value = self.load_app_settings().get(key)
|
| 94 |
+
return value if value is not None else None
|
| 95 |
+
|
| 96 |
+
def set_app_setting(self, key: str, value: str | None) -> None:
|
| 97 |
+
raise NotImplementedError
|
| 98 |
+
|
| 99 |
+
def save_raw(self, raw: list[dict[str, Any]]) -> None:
|
| 100 |
+
"""从 API/前端原始格式写入并保存。"""
|
| 101 |
+
groups = _raw_to_groups(raw)
|
| 102 |
+
self.save_groups(groups)
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
class _SqliteConfigRepository(_RepositoryBase):
    """SQLite-backed configuration storage.

    Schema creation is lazy (on first use) and idempotent; new columns are
    added via best-effort ALTERs so older database files migrate in place.
    """

    def __init__(self, db_path: Path | None = None) -> None:
        self._db_path = db_path or _get_db_path()
        self._schema_initialized = False  # guard so DDL runs once per instance

    def _conn(self) -> sqlite3.Connection:
        """Open a new connection, creating the parent directory if needed."""
        self._db_path.parent.mkdir(parents=True, exist_ok=True)
        return sqlite3.connect(self._db_path)

    def _init_tables(self, conn: sqlite3.Connection) -> None:
        """Create tables/indexes and apply additive column migrations."""
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS proxy_group (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                proxy_host TEXT NOT NULL,
                proxy_user TEXT NOT NULL,
                proxy_pass TEXT NOT NULL,
                fingerprint_id TEXT NOT NULL DEFAULT '',
                use_proxy INTEGER NOT NULL DEFAULT 1,
                timezone TEXT
            )
            """
        )
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS account (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                proxy_group_id INTEGER NOT NULL,
                name TEXT NOT NULL,
                type TEXT NOT NULL,
                auth TEXT NOT NULL DEFAULT '{}',
                enabled INTEGER NOT NULL DEFAULT 1,
                FOREIGN KEY (proxy_group_id) REFERENCES proxy_group(id) ON DELETE CASCADE
            )
            """
        )
        conn.execute(
            "CREATE INDEX IF NOT EXISTS ix_account_proxy_group_id ON account(proxy_group_id)"
        )
        conn.execute("CREATE INDEX IF NOT EXISTS ix_account_type ON account(type)")
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS app_setting (
                key TEXT PRIMARY KEY,
                value TEXT NOT NULL DEFAULT ''
            )
            """
        )
        # Additive migrations: ALTER raises OperationalError when the column
        # already exists, which is the expected steady state.
        for ddl in (
            "ALTER TABLE account ADD COLUMN unfreeze_at INTEGER",
            "ALTER TABLE account ADD COLUMN enabled INTEGER NOT NULL DEFAULT 1",
            "ALTER TABLE proxy_group ADD COLUMN use_proxy INTEGER NOT NULL DEFAULT 1",
            "ALTER TABLE proxy_group ADD COLUMN timezone TEXT",
        ):
            try:
                conn.execute(ddl)
            except sqlite3.OperationalError:
                pass
        conn.commit()

    def _ensure_schema(self) -> None:
        """Run schema init exactly once for this repository instance."""
        if self._schema_initialized:
            return
        conn = self._conn()
        try:
            self._init_tables(conn)
            self._schema_initialized = True
        finally:
            conn.close()

    def init_schema(self) -> None:
        self._ensure_schema()

    def load_groups(self) -> list[ProxyGroupConfig]:
        """Read all proxy groups with their accounts, ordered by insertion id."""
        self._ensure_schema()
        conn = self._conn()
        try:
            groups: list[ProxyGroupConfig] = []
            group_rows = conn.execute(
                """
                SELECT id, proxy_host, proxy_user, proxy_pass, fingerprint_id, use_proxy, timezone
                FROM proxy_group ORDER BY id ASC
                """
            ).fetchall()
            accounts_by_group: dict[int, list[AccountConfig]] = {}
            for gid, name, type_, auth_json, enabled, unfreeze_at in conn.execute(
                """
                SELECT proxy_group_id, name, type, auth, enabled, unfreeze_at
                FROM account ORDER BY proxy_group_id ASC, id ASC
                """
            ).fetchall():
                accounts_by_group.setdefault(int(gid), []).append(
                    account_from_row(
                        name,
                        type_,
                        auth_json or "{}",
                        # Legacy rows created before the `enabled` migration may be NULL.
                        enabled=bool(enabled) if enabled is not None else True,
                        unfreeze_at=unfreeze_at,
                    )
                )
            for gid, proxy_host, proxy_user, proxy_pass, fingerprint_id, use_proxy, timezone in group_rows:
                groups.append(
                    ProxyGroupConfig(
                        proxy_host=proxy_host,
                        proxy_user=proxy_user,
                        proxy_pass=proxy_pass,
                        fingerprint_id=fingerprint_id or "",
                        use_proxy=bool(use_proxy),
                        timezone=timezone,
                        accounts=accounts_by_group.get(int(gid), []),
                    )
                )
            return groups
        finally:
            conn.close()

    def save_groups(self, groups: list[ProxyGroupConfig]) -> None:
        """Replace the whole configuration (delete-all then re-insert)."""
        self._ensure_schema()
        conn = self._conn()
        try:
            conn.execute("DELETE FROM account")
            conn.execute("DELETE FROM proxy_group")
            for group in groups:
                cur = conn.execute(
                    """
                    INSERT INTO proxy_group (proxy_host, proxy_user, proxy_pass, fingerprint_id, use_proxy, timezone)
                    VALUES (?, ?, ?, ?, ?, ?)
                    """,
                    (
                        group.proxy_host,
                        group.proxy_user,
                        group.proxy_pass,
                        group.fingerprint_id,
                        1 if group.use_proxy else 0,
                        group.timezone,
                    ),
                )
                gid = cur.lastrowid
                for account in group.accounts:
                    conn.execute(
                        """
                        INSERT INTO account (proxy_group_id, name, type, auth, enabled, unfreeze_at)
                        VALUES (?, ?, ?, ?, ?, ?)
                        """,
                        (
                            gid,
                            account.name,
                            account.type,
                            account.auth_json(),
                            1 if account.enabled else 0,
                            account.unfreeze_at,
                        ),
                    )
            conn.commit()
        finally:
            conn.close()

    def update_account_unfreeze_at(
        self,
        fingerprint_id: str,
        account_name: str,
        unfreeze_at: int | None,
    ) -> None:
        """Set/clear the unfreeze timestamp of one account identified by
        (group fingerprint, account name)."""
        self._ensure_schema()
        conn = self._conn()
        try:
            conn.execute(
                # ORDER BY id ASC LIMIT 1 makes the lookup deterministic when
                # several groups share a fingerprint, matching the Postgres backend.
                """
                UPDATE account SET unfreeze_at = ?
                WHERE proxy_group_id = (
                    SELECT id FROM proxy_group WHERE fingerprint_id = ? ORDER BY id ASC LIMIT 1
                )
                  AND name = ?
                """,
                (unfreeze_at, fingerprint_id, account_name),
            )
            conn.commit()
        finally:
            conn.close()

    def load_app_settings(self) -> dict[str, str]:
        """Return all key/value rows of app_setting."""
        self._ensure_schema()
        conn = self._conn()
        try:
            rows = conn.execute(
                "SELECT key, value FROM app_setting ORDER BY key ASC"
            ).fetchall()
            return {str(key): str(value) for key, value in rows}
        finally:
            conn.close()

    def set_app_setting(self, key: str, value: str | None) -> None:
        """Upsert a setting; value=None deletes the row."""
        self._ensure_schema()
        conn = self._conn()
        try:
            if value is None:
                conn.execute("DELETE FROM app_setting WHERE key = ?", (key,))
            else:
                conn.execute(
                    """
                    INSERT INTO app_setting (key, value) VALUES (?, ?)
                    ON CONFLICT(key) DO UPDATE SET value = excluded.value
                    """,
                    (key, value),
                )
            conn.commit()
        finally:
            conn.close()
|
| 321 |
+
|
| 322 |
+
|
| 323 |
+
class _PostgresConfigRepository(_RepositoryBase):
    """PostgreSQL-backed configuration storage using psycopg.

    NOTE(review): relies on psycopg's connection context manager committing
    on clean exit — confirm against the installed psycopg major version.
    """

    def __init__(self, database_url: str) -> None:
        # Full connection URL, e.g. postgresql://user:pass@host/db.
        self._database_url = database_url

    def _conn(self) -> Any:
        # Imported lazily so SQLite-only deployments don't need psycopg installed.
        import psycopg

        return psycopg.connect(self._database_url)

    def init_schema(self) -> None:
        """Create tables and indexes (idempotent via IF NOT EXISTS)."""
        with self._conn() as conn:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    CREATE TABLE IF NOT EXISTS proxy_group (
                        id BIGSERIAL PRIMARY KEY,
                        proxy_host TEXT NOT NULL,
                        proxy_user TEXT NOT NULL,
                        proxy_pass TEXT NOT NULL,
                        fingerprint_id TEXT NOT NULL DEFAULT '',
                        use_proxy BOOLEAN NOT NULL DEFAULT TRUE,
                        timezone TEXT
                    )
                    """
                )
                cur.execute(
                    """
                    CREATE TABLE IF NOT EXISTS account (
                        id BIGSERIAL PRIMARY KEY,
                        proxy_group_id BIGINT NOT NULL REFERENCES proxy_group(id) ON DELETE CASCADE,
                        name TEXT NOT NULL,
                        type TEXT NOT NULL,
                        auth TEXT NOT NULL DEFAULT '{}',
                        enabled BOOLEAN NOT NULL DEFAULT TRUE,
                        unfreeze_at BIGINT
                    )
                    """
                )
                cur.execute(
                    "CREATE INDEX IF NOT EXISTS ix_account_proxy_group_id ON account(proxy_group_id)"
                )
                cur.execute("CREATE INDEX IF NOT EXISTS ix_account_type ON account(type)")
                cur.execute(
                    """
                    CREATE TABLE IF NOT EXISTS app_setting (
                        key TEXT PRIMARY KEY,
                        value TEXT NOT NULL DEFAULT ''
                    )
                    """
                )

    def load_groups(self) -> list[ProxyGroupConfig]:
        """Read all proxy groups with their accounts, ordered by insertion id."""
        groups: list[ProxyGroupConfig] = []
        with self._conn() as conn:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    SELECT id, proxy_host, proxy_user, proxy_pass, fingerprint_id, use_proxy, timezone
                    FROM proxy_group ORDER BY id ASC
                    """
                )
                group_rows = cur.fetchall()
                cur.execute(
                    """
                    SELECT proxy_group_id, name, type, auth, enabled, unfreeze_at
                    FROM account ORDER BY proxy_group_id ASC, id ASC
                    """
                )
                # Bucket accounts by group id so we can attach them below.
                accounts_by_group: dict[int, list[AccountConfig]] = {}
                for gid, name, type_, auth_json, enabled, unfreeze_at in cur.fetchall():
                    accounts_by_group.setdefault(int(gid), []).append(
                        account_from_row(
                            name,
                            type_,
                            auth_json or "{}",
                            enabled=bool(enabled) if enabled is not None else True,
                            unfreeze_at=unfreeze_at,
                        )
                    )
                for row in group_rows:
                    (
                        gid,
                        proxy_host,
                        proxy_user,
                        proxy_pass,
                        fingerprint_id,
                        use_proxy,
                        timezone,
                    ) = row
                    groups.append(
                        ProxyGroupConfig(
                            proxy_host=proxy_host,
                            proxy_user=proxy_user,
                            proxy_pass=proxy_pass,
                            fingerprint_id=fingerprint_id or "",
                            use_proxy=bool(use_proxy),
                            timezone=timezone,
                            accounts=accounts_by_group.get(int(gid), []),
                        )
                    )
        return groups

    def save_groups(self, groups: list[ProxyGroupConfig]) -> None:
        """Replace the whole configuration (delete-all then re-insert) in one transaction."""
        with self._conn() as conn:
            with conn.cursor() as cur:
                cur.execute("DELETE FROM account")
                cur.execute("DELETE FROM proxy_group")
                for group in groups:
                    cur.execute(
                        """
                        INSERT INTO proxy_group (proxy_host, proxy_user, proxy_pass, fingerprint_id, use_proxy, timezone)
                        VALUES (%s, %s, %s, %s, %s, %s)
                        RETURNING id
                        """,
                        (
                            group.proxy_host,
                            group.proxy_user,
                            group.proxy_pass,
                            group.fingerprint_id,
                            group.use_proxy,
                            group.timezone,
                        ),
                    )
                    # RETURNING id gives the freshly inserted group's key.
                    gid = cur.fetchone()[0]
                    for account in group.accounts:
                        cur.execute(
                            """
                            INSERT INTO account (proxy_group_id, name, type, auth, enabled, unfreeze_at)
                            VALUES (%s, %s, %s, %s, %s, %s)
                            """,
                            (
                                gid,
                                account.name,
                                account.type,
                                account.auth_json(),
                                account.enabled,
                                account.unfreeze_at,
                            ),
                        )

    def update_account_unfreeze_at(
        self,
        fingerprint_id: str,
        account_name: str,
        unfreeze_at: int | None,
    ) -> None:
        """Set/clear the unfreeze timestamp of one account identified by
        (group fingerprint, account name); first matching group wins."""
        with self._conn() as conn:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    UPDATE account SET unfreeze_at = %s
                    WHERE proxy_group_id = (
                        SELECT id FROM proxy_group WHERE fingerprint_id = %s ORDER BY id ASC LIMIT 1
                    )
                      AND name = %s
                    """,
                    (unfreeze_at, fingerprint_id, account_name),
                )

    def load_app_settings(self) -> dict[str, str]:
        """Return all key/value rows of app_setting."""
        with self._conn() as conn:
            with conn.cursor() as cur:
                cur.execute("SELECT key, value FROM app_setting ORDER BY key ASC")
                return {str(key): str(value) for key, value in cur.fetchall()}

    def set_app_setting(self, key: str, value: str | None) -> None:
        """Upsert a setting; value=None deletes the row."""
        with self._conn() as conn:
            with conn.cursor() as cur:
                if value is None:
                    cur.execute("DELETE FROM app_setting WHERE key = %s", (key,))
                else:
                    cur.execute(
                        """
                        INSERT INTO app_setting (key, value) VALUES (%s, %s)
                        ON CONFLICT (key) DO UPDATE SET value = EXCLUDED.value
                        """,
                        (key, value),
                    )
|
| 501 |
+
|
| 502 |
+
|
| 503 |
+
class ConfigRepository(_RepositoryBase):
    """Public entry point for config reads/writes.

    A thin facade: every call is forwarded verbatim to the backend chosen by
    create_config_repository (SQLite or PostgreSQL).
    """

    def __init__(self, backend: _RepositoryBase) -> None:
        self._backend = backend

    def init_schema(self) -> None:
        self._backend.init_schema()

    def load_groups(self) -> list[ProxyGroupConfig]:
        return self._backend.load_groups()

    def save_groups(self, groups: list[ProxyGroupConfig]) -> None:
        self._backend.save_groups(groups)

    def load_raw(self) -> list[dict[str, Any]]:
        return self._backend.load_raw()

    def load_app_settings(self) -> dict[str, str]:
        return self._backend.load_app_settings()

    def get_app_setting(self, key: str) -> str | None:
        return self._backend.get_app_setting(key)

    def set_app_setting(self, key: str, value: str | None) -> None:
        self._backend.set_app_setting(key, value)

    def save_raw(self, raw: list[dict[str, Any]]) -> None:
        self._backend.save_raw(raw)

    def update_account_unfreeze_at(
        self,
        fingerprint_id: str,
        account_name: str,
        unfreeze_at: int | None,
    ) -> None:
        self._backend.update_account_unfreeze_at(fingerprint_id, account_name, unfreeze_at)
|
| 544 |
+
|
| 545 |
+
|
| 546 |
+
def _raw_to_groups(raw: list[dict[str, Any]]) -> list[ProxyGroupConfig]:
    """Convert the raw API/frontend list format into ProxyGroupConfig objects.

    Accounts without a name are dropped. ``auth`` may arrive as a dict or a
    JSON string; anything that does not decode to a dict becomes ``{}`` so the
    ``AccountConfig.auth: dict`` contract holds.
    """
    import json

    groups: list[ProxyGroupConfig] = []
    for group in raw:
        accounts: list[AccountConfig] = []
        for account in group.get("accounts", []):
            name = str(account.get("name", "")).strip()
            if not name:
                continue
            type_ = str(account.get("type", "")).strip() or "claude"
            auth = account.get("auth")
            if isinstance(auth, str):
                try:
                    auth = json.loads(auth) or {}
                except Exception:
                    auth = {}
            if not isinstance(auth, dict):
                # Non-dict payloads (lists, numbers, null, ...) are rejected.
                auth = {}
            unfreeze_at = account.get("unfreeze_at")
            # bool is an int subclass: true/false must not become timestamp 1/0.
            if isinstance(unfreeze_at, bool) or not isinstance(unfreeze_at, (int, float)):
                unfreeze_at = None
            else:
                unfreeze_at = int(unfreeze_at)
            accounts.append(
                AccountConfig(
                    name=name,
                    type=type_,
                    auth=auth,
                    enabled=coerce_bool(account.get("enabled", True), True),
                    unfreeze_at=unfreeze_at,
                )
            )
        groups.append(
            ProxyGroupConfig(
                proxy_host=str(group.get("proxy_host", "")),
                proxy_user=str(group.get("proxy_user", "")),
                proxy_pass=str(group.get("proxy_pass", "")),
                fingerprint_id=str(group.get("fingerprint_id", "")),
                use_proxy=coerce_bool(group.get("use_proxy", True), True),
                timezone=group.get("timezone"),
                accounts=accounts,
            )
        )
    return groups
|
core/config/schema.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
配置数据模型:按代理 IP(指纹)分组,账号含 name / type / auth(JSON)。
|
| 3 |
+
不设 profile_id,user-data-dir 按指纹等由运行时拼接。
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from dataclasses import dataclass, field
|
| 7 |
+
from typing import Any
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@dataclass(frozen=True)
class AccountConfig:
    """A single account: display name, plugin type and auth payload.

    An account belongs to exactly one plugin type.
    """

    name: str
    # Plugin type, e.g. "claude", "chatgpt", "kimi".
    type: str
    # Credential keys are plugin-defined, e.g. Claude uses sessionKey.
    auth: dict[str, Any]
    enabled: bool = True
    # Unix timestamp after which the account thaws; None (or a past time)
    # means the account is usable.
    unfreeze_at: int | None = None

    def auth_json(self) -> str:
        """Serialize the auth payload to a JSON string for DB storage."""
        import json

        return json.dumps(self.auth, ensure_ascii=False)

    def is_available(self) -> bool:
        """Enabled and past its unfreeze time (no unfreeze time counts as available)."""
        import time

        if not self.enabled:
            return False
        return self.unfreeze_at is None or time.time() >= self.unfreeze_at
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
@dataclass
class ProxyGroupConfig:
    """One proxy-IP group: proxy parameters, fingerprint and its accounts."""

    proxy_host: str
    proxy_user: str
    proxy_pass: str
    fingerprint_id: str
    use_proxy: bool = True
    timezone: str | None = None
    accounts: list[AccountConfig] = field(default_factory=list)

    def account_ids(self) -> list[str]:
        """Per-group account identifiers (account names), used e.g. by the
        session cache; the group index is injected later by the repository."""
        names: list[str] = []
        for account in self.accounts:
            names.append(account.name)
        return names
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def account_from_row(
    name: str,
    type: str,
    auth_json: str,
    enabled: bool = True,
    unfreeze_at: int | None = None,
) -> AccountConfig:
    """Build an AccountConfig from a DB row, tolerating malformed auth JSON."""
    import json

    if auth_json:
        try:
            auth = json.loads(auth_json)
        except Exception:
            # Corrupt rows degrade to an empty auth payload instead of failing.
            auth = {}
    else:
        auth = {}
    return AccountConfig(
        name=name,
        type=type,
        auth=auth,
        enabled=enabled,
        unfreeze_at=unfreeze_at,
    )
|
core/config/settings.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
统一的 YAML 配置加载。
|
| 3 |
+
|
| 4 |
+
优先级:
|
| 5 |
+
1. WEB2API_CONFIG_PATH 指定的路径
|
| 6 |
+
2. 项目根目录下的 config.local.yaml
|
| 7 |
+
3. 项目根目录下的 config.yaml
|
| 8 |
+
|
| 9 |
+
同时支持通过环境变量覆盖单个配置项:
|
| 10 |
+
- 通用规则:WEB2API_<SECTION>_<KEY>
|
| 11 |
+
- 额外兼容:server.host -> HOST,server.port -> PORT
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
from __future__ import annotations
|
| 15 |
+
|
| 16 |
+
import os
|
| 17 |
+
from pathlib import Path
|
| 18 |
+
from typing import Any
|
| 19 |
+
|
| 20 |
+
import yaml
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
# Project root = three levels up from core/config/settings.py.
_PROJECT_ROOT = Path(__file__).resolve().parent.parent.parent
_CONFIG_ENV_KEY = "WEB2API_CONFIG_PATH"
_LOCAL_CONFIG_NAME = "config.local.yaml"
_DEFAULT_CONFIG_NAME = "config.yaml"
# Sentinel distinguishing "no env override present" from any real value.
_ENV_MISSING = object()
# Extra env-var spellings accepted for specific (section, key) pairs.
_ENV_OVERRIDE_ALIASES: dict[tuple[str, str], tuple[str, ...]] = {
    ("server", "host"): ("HOST",),
    ("server", "port"): ("PORT",),
}
_DATABASE_URL_ENV_NAMES = ("WEB2API_DATABASE_URL", "DATABASE_URL")
# Accepted spellings for boolean-ish string values (case-insensitive use expected).
_BOOL_TRUE_VALUES = {"1", "true", "yes", "on"}
_BOOL_FALSE_VALUES = {"0", "false", "no", "off"}
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def _resolve_config_path() -> Path:
    """Pick the config file: env override, then config.local.yaml, then config.yaml."""
    from_env = os.environ.get(_CONFIG_ENV_KEY, "").strip()
    if from_env:
        return Path(from_env).expanduser()
    candidate = _PROJECT_ROOT / _LOCAL_CONFIG_NAME
    if candidate.exists():
        return candidate
    return _PROJECT_ROOT / _DEFAULT_CONFIG_NAME
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
# Resolved once at import time; reset_cache() clears the parsed data only,
# never this path.
_CONFIG_PATH = _resolve_config_path()

# Parsed YAML cache; None means "not loaded yet".
_config_cache: dict[str, Any] | None = None
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def _env_override_names(section: str, key: str) -> tuple[str, ...]:
    """Candidate env-var names for section.key: the generic WEB2API_* name
    first, then any configured aliases (deduplicated against the generic)."""
    primary = f"WEB2API_{section}_{key}".upper().replace("-", "_")
    names = [primary]
    for alias in _ENV_OVERRIDE_ALIASES.get((section, key), ()):
        if alias != primary:
            names.append(alias)
    return tuple(names)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _get_env_override(section: str, key: str) -> Any:
|
| 61 |
+
for env_name in _env_override_names(section, key):
|
| 62 |
+
if env_name in os.environ:
|
| 63 |
+
return os.environ[env_name]
|
| 64 |
+
return _ENV_MISSING
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def has_env_override(section: str, key: str) -> bool:
|
| 68 |
+
return _get_env_override(section, key) is not _ENV_MISSING
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def get_config_path() -> Path:
|
| 72 |
+
return _CONFIG_PATH
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def reset_cache() -> None:
|
| 76 |
+
global _config_cache
|
| 77 |
+
_config_cache = None
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def load_config() -> dict[str, Any]:
|
| 81 |
+
"""按优先级加载配置文件,不存在时返回空 dict。"""
|
| 82 |
+
global _config_cache
|
| 83 |
+
if _config_cache is not None:
|
| 84 |
+
return _config_cache
|
| 85 |
+
if not _CONFIG_PATH.exists():
|
| 86 |
+
_config_cache = {}
|
| 87 |
+
return {}
|
| 88 |
+
try:
|
| 89 |
+
with _CONFIG_PATH.open("r", encoding="utf-8") as f:
|
| 90 |
+
data = yaml.safe_load(f) or {}
|
| 91 |
+
if not isinstance(data, dict):
|
| 92 |
+
_config_cache = {}
|
| 93 |
+
else:
|
| 94 |
+
_config_cache = dict(data)
|
| 95 |
+
except Exception:
|
| 96 |
+
_config_cache = {}
|
| 97 |
+
return _config_cache
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def get(section: str, key: str, default: Any = None) -> Any:
|
| 101 |
+
"""从配置中读取 section.key,环境变量优先,其次 YAML,最后返回 default。"""
|
| 102 |
+
env_override = _get_env_override(section, key)
|
| 103 |
+
if env_override is not _ENV_MISSING:
|
| 104 |
+
return env_override
|
| 105 |
+
cfg = load_config().get(section) or {}
|
| 106 |
+
if not isinstance(cfg, dict):
|
| 107 |
+
return default
|
| 108 |
+
val = cfg.get(key)
|
| 109 |
+
return val if val is not None else default
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def coerce_bool(value: Any, default: bool = False) -> bool:
|
| 113 |
+
if isinstance(value, bool):
|
| 114 |
+
return value
|
| 115 |
+
if isinstance(value, (int, float)):
|
| 116 |
+
return bool(value)
|
| 117 |
+
if isinstance(value, str):
|
| 118 |
+
normalized = value.strip().lower()
|
| 119 |
+
if normalized in _BOOL_TRUE_VALUES:
|
| 120 |
+
return True
|
| 121 |
+
if normalized in _BOOL_FALSE_VALUES:
|
| 122 |
+
return False
|
| 123 |
+
return bool(default)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def get_bool(section: str, key: str, default: bool = False) -> bool:
|
| 127 |
+
"""从配置读取布尔值,兼容 true/false、1/0、yes/no、on/off。"""
|
| 128 |
+
return coerce_bool(get(section, key, default), default)
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def get_server_host(default: str = "127.0.0.1") -> str:
|
| 132 |
+
return str(get("server", "host") or default).strip() or default
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def get_server_port(default: int = 8001) -> int:
|
| 136 |
+
try:
|
| 137 |
+
return int(str(get("server", "port") or default).strip())
|
| 138 |
+
except Exception:
|
| 139 |
+
return default
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def get_database_url(default: str = "") -> str:
|
| 143 |
+
for env_name in _DATABASE_URL_ENV_NAMES:
|
| 144 |
+
value = os.environ.get(env_name, "").strip()
|
| 145 |
+
if value:
|
| 146 |
+
return value
|
| 147 |
+
return default
|
core/constants.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""全局常量:浏览器路径、CDP 端口等(新架构专用)。"""
|
| 2 |
+
|
| 3 |
+
from pathlib import Path
|
| 4 |
+
|
| 5 |
+
# 与现有 multi_web2api 保持一致,便于同机运行时分端口
|
| 6 |
+
CHROMIUM_BIN = "/Applications/Chromium.app/Contents/MacOS/Chromium"
|
| 7 |
+
REMOTE_DEBUGGING_PORT = 9223 # 默认端口,单浏览器兼容
|
| 8 |
+
# 多浏览器并存时的端口池(按 ProxyKey 各占一端口,仅当 refcount=0 时关闭并回收端口)
|
| 9 |
+
CDP_PORT_RANGE = list(range(9223, 9243)) # 9223..9232,最多 20 个并发浏览器
|
| 10 |
+
CDP_ENDPOINT = "http://127.0.0.1:9223"
|
| 11 |
+
TIMEZONE = "America/Chicago"
|
| 12 |
+
USER_DATA_DIR_PREFIX = "fp-data" # user_data_dir = home / fp-data / fingerprint_id
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def user_data_dir(fingerprint_id: str) -> Path:
|
| 16 |
+
"""按指纹 ID 拼接 user-data-dir,不依赖 profile_id。"""
|
| 17 |
+
return Path.home() / USER_DATA_DIR_PREFIX / fingerprint_id
|
core/hub/__init__.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Hub 层:以 **OpenAI 语义**作为唯一中间态。
|
| 3 |
+
|
| 4 |
+
设计目标:
|
| 5 |
+
- 插件侧:把“站点平台格式”转换成 OpenAI 语义(请求 + 结构化流事件)。
|
| 6 |
+
- 协议侧:把 OpenAI 语义转换成不同对外协议(OpenAI / Anthropic / Kimi ...)。
|
| 7 |
+
|
| 8 |
+
当前仓库历史上存在 Canonical 模型用于多协议解析;Hub 层用于把“内部执行语义”
|
| 9 |
+
固定为 OpenAI,降低插件/协议扩展的学习成本。
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
from .schemas import OpenAIStreamEvent
|
| 13 |
+
|
| 14 |
+
__all__ = ["OpenAIStreamEvent"]
|
core/hub/openai_sse.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
把 OpenAIStreamEvent 编码为 OpenAI ChatCompletions SSE chunk。
|
| 3 |
+
|
| 4 |
+
这是 Hub 层的“协议输出工具”,用于把插件输出的结构化事件流转换为
|
| 5 |
+
OpenAI 兼容的 `data: {...}\\n\\n` 片段。
|
| 6 |
+
|
| 7 |
+
当前不替换既有渲染链路,仅提供给后续协议/插件扩展使用。
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
from __future__ import annotations
|
| 11 |
+
|
| 12 |
+
import json
|
| 13 |
+
import time
|
| 14 |
+
import uuid as uuid_mod
|
| 15 |
+
from collections.abc import AsyncIterator, Iterator
|
| 16 |
+
|
| 17 |
+
from core.hub.schemas import OpenAIStreamEvent
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def make_openai_stream_context(*, model: str) -> tuple[str, int]:
|
| 21 |
+
"""生成 OpenAI SSE 上下文:chat_id + created。"""
|
| 22 |
+
chat_id = f"chatcmpl-{uuid_mod.uuid4().hex[:24]}"
|
| 23 |
+
created = int(time.time())
|
| 24 |
+
# model 由上层写入 payload
|
| 25 |
+
del model
|
| 26 |
+
return chat_id, created
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _chunk(
|
| 30 |
+
*,
|
| 31 |
+
chat_id: str,
|
| 32 |
+
model: str,
|
| 33 |
+
created: int,
|
| 34 |
+
delta: dict,
|
| 35 |
+
finish_reason: str | None = None,
|
| 36 |
+
) -> str:
|
| 37 |
+
return (
|
| 38 |
+
"data: "
|
| 39 |
+
+ json.dumps(
|
| 40 |
+
{
|
| 41 |
+
"id": chat_id,
|
| 42 |
+
"object": "chat.completion.chunk",
|
| 43 |
+
"created": created,
|
| 44 |
+
"model": model,
|
| 45 |
+
"choices": [
|
| 46 |
+
{
|
| 47 |
+
"index": 0,
|
| 48 |
+
"delta": delta,
|
| 49 |
+
"logprobs": None,
|
| 50 |
+
"finish_reason": finish_reason,
|
| 51 |
+
}
|
| 52 |
+
],
|
| 53 |
+
},
|
| 54 |
+
ensure_ascii=False,
|
| 55 |
+
)
|
| 56 |
+
+ "\n\n"
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def encode_openai_sse_events(
|
| 61 |
+
events: Iterator[OpenAIStreamEvent],
|
| 62 |
+
*,
|
| 63 |
+
chat_id: str,
|
| 64 |
+
model: str,
|
| 65 |
+
created: int,
|
| 66 |
+
) -> Iterator[str]:
|
| 67 |
+
"""同步编码器:OpenAIStreamEvent -> OpenAI SSE strings。"""
|
| 68 |
+
# 兼容主流 OpenAI SSE 客户端:先发一帧 role:assistant + content:""
|
| 69 |
+
yield _chunk(
|
| 70 |
+
chat_id=chat_id,
|
| 71 |
+
model=model,
|
| 72 |
+
created=created,
|
| 73 |
+
delta={"role": "assistant", "content": ""},
|
| 74 |
+
finish_reason=None,
|
| 75 |
+
)
|
| 76 |
+
for ev in events:
|
| 77 |
+
if ev.type == "content_delta":
|
| 78 |
+
if ev.content:
|
| 79 |
+
yield _chunk(
|
| 80 |
+
chat_id=chat_id,
|
| 81 |
+
model=model,
|
| 82 |
+
created=created,
|
| 83 |
+
delta={"content": ev.content},
|
| 84 |
+
finish_reason=None,
|
| 85 |
+
)
|
| 86 |
+
elif ev.type == "tool_call_delta":
|
| 87 |
+
if ev.tool_calls:
|
| 88 |
+
yield _chunk(
|
| 89 |
+
chat_id=chat_id,
|
| 90 |
+
model=model,
|
| 91 |
+
created=created,
|
| 92 |
+
delta={"tool_calls": [tc.model_dump() for tc in ev.tool_calls]},
|
| 93 |
+
finish_reason=None,
|
| 94 |
+
)
|
| 95 |
+
elif ev.type == "finish":
|
| 96 |
+
# OpenAI 的结束 chunk 允许 delta 为空对象
|
| 97 |
+
yield _chunk(
|
| 98 |
+
chat_id=chat_id,
|
| 99 |
+
model=model,
|
| 100 |
+
created=created,
|
| 101 |
+
delta={},
|
| 102 |
+
finish_reason=ev.finish_reason or "stop",
|
| 103 |
+
)
|
| 104 |
+
yield "data: [DONE]\n\n"
|
| 105 |
+
return
|
| 106 |
+
elif ev.type == "error":
|
| 107 |
+
# OpenAI SSE 没有标准 error 事件,这里用 data 包一层 error 对象(与现有实现一致风格)
|
| 108 |
+
msg = ev.error or "unknown error"
|
| 109 |
+
yield (
|
| 110 |
+
"data: "
|
| 111 |
+
+ json.dumps(
|
| 112 |
+
{"error": {"message": msg, "type": "server_error"}},
|
| 113 |
+
ensure_ascii=False,
|
| 114 |
+
)
|
| 115 |
+
+ "\n\n"
|
| 116 |
+
)
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
async def encode_openai_sse_events_async(
|
| 120 |
+
events: AsyncIterator[OpenAIStreamEvent],
|
| 121 |
+
*,
|
| 122 |
+
chat_id: str,
|
| 123 |
+
model: str,
|
| 124 |
+
created: int,
|
| 125 |
+
) -> AsyncIterator[str]:
|
| 126 |
+
"""异步编码器:OpenAIStreamEvent -> OpenAI SSE strings。"""
|
| 127 |
+
async for ev in events:
|
| 128 |
+
for out in encode_openai_sse_events(
|
| 129 |
+
iter([ev]),
|
| 130 |
+
chat_id=chat_id,
|
| 131 |
+
model=model,
|
| 132 |
+
created=created,
|
| 133 |
+
):
|
| 134 |
+
yield out
|
core/hub/schemas.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
OpenAI 语义的结构化流事件(唯一流式中间态)。
|
| 3 |
+
|
| 4 |
+
整条链路:插件产出字符串流 → core 包装为 content_delta + finish →
|
| 5 |
+
协议适配层消费事件、编码为各协议 SSE(OpenAI / Anthropic / 未来 Kimi 等)。
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
from __future__ import annotations
|
| 9 |
+
|
| 10 |
+
from typing import Any, Literal
|
| 11 |
+
|
| 12 |
+
from pydantic import BaseModel, Field
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class OpenAIToolCallDelta(BaseModel):
|
| 16 |
+
"""OpenAI stream delta 中的 tool_calls[?] 片段(最小必要字段)。"""
|
| 17 |
+
|
| 18 |
+
index: int = 0
|
| 19 |
+
id: str | None = None
|
| 20 |
+
type: Literal["function"] = "function"
|
| 21 |
+
function: dict[str, Any] = Field(default_factory=dict)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class OpenAIStreamEvent(BaseModel):
|
| 25 |
+
"""
|
| 26 |
+
OpenAI 语义的“内部流事件”。
|
| 27 |
+
- content_delta:增量文本(delta.content)
|
| 28 |
+
- tool_call_delta:工具调用增量(delta.tool_calls)
|
| 29 |
+
- finish:结束(finish_reason)
|
| 30 |
+
- error:错误
|
| 31 |
+
协议适配层负责将事件序列化为目标协议的 SSE/JSON。
|
| 32 |
+
"""
|
| 33 |
+
|
| 34 |
+
type: Literal["content_delta", "tool_call_delta", "finish", "error"]
|
| 35 |
+
|
| 36 |
+
# content_delta
|
| 37 |
+
content: str | None = None
|
| 38 |
+
|
| 39 |
+
# tool_call_delta
|
| 40 |
+
tool_calls: list[OpenAIToolCallDelta] | None = None
|
| 41 |
+
|
| 42 |
+
# finish
|
| 43 |
+
finish_reason: str | None = None
|
| 44 |
+
|
| 45 |
+
# error
|
| 46 |
+
error: str | None = None
|
core/plugin/__init__.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""插件层:抽象接口与注册表,各 type 实现 create_page / apply_auth / create_conversation / stream_completion。"""
|
| 2 |
+
|
| 3 |
+
from core.plugin.base import AbstractPlugin, BaseSitePlugin, PluginRegistry, SiteConfig
|
| 4 |
+
|
| 5 |
+
__all__ = ["AbstractPlugin", "BaseSitePlugin", "PluginRegistry", "SiteConfig"]
|
core/plugin/base.py
ADDED
|
@@ -0,0 +1,519 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
插件抽象与注册表:type_name -> 插件实现。
|
| 3 |
+
|
| 4 |
+
三层设计:
|
| 5 |
+
AbstractPlugin — 最底层接口,理论上支持任意协议(非 Cookie、非 SSE 的站点也能接)。
|
| 6 |
+
BaseSitePlugin — Cookie 认证 + SSE 流式站点的通用编排,插件开发者继承它只需实现 5 个 hook。
|
| 7 |
+
PluginRegistry — 全局注册表。
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import json
|
| 11 |
+
import logging
|
| 12 |
+
from abc import ABC, abstractmethod
|
| 13 |
+
from dataclasses import dataclass
|
| 14 |
+
from typing import Any, AsyncIterator
|
| 15 |
+
|
| 16 |
+
from playwright.async_api import BrowserContext, Page
|
| 17 |
+
|
| 18 |
+
from core.api.schemas import InputAttachment
|
| 19 |
+
from core.config.settings import get
|
| 20 |
+
from core.plugin.errors import ( # noqa: F401 — re-export for backward compat
|
| 21 |
+
AccountFrozenError,
|
| 22 |
+
BrowserResourceInvalidError,
|
| 23 |
+
)
|
| 24 |
+
from core.plugin.helpers import (
|
| 25 |
+
apply_cookie_auth,
|
| 26 |
+
create_page_for_site,
|
| 27 |
+
stream_completion_via_sse,
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
logger = logging.getLogger(__name__)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@dataclass(frozen=True)
|
| 34 |
+
class ResolvedModel:
|
| 35 |
+
public_model: str
|
| 36 |
+
upstream_model: str
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
# ---------------------------------------------------------------------------
|
| 40 |
+
# SiteConfig:纯声明式站点配置
|
| 41 |
+
# ---------------------------------------------------------------------------
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@dataclass
|
| 45 |
+
class SiteConfig:
|
| 46 |
+
"""Cookie 认证站点的声明式配置,插件开发者只需填字段,无需写任何方法。"""
|
| 47 |
+
|
| 48 |
+
start_url: str
|
| 49 |
+
api_base: str
|
| 50 |
+
cookie_name: str
|
| 51 |
+
cookie_domain: str
|
| 52 |
+
auth_keys: list[str]
|
| 53 |
+
config_section: str = (
|
| 54 |
+
"" # config.yaml 中的 section,如 "claude",用于覆盖 start_url/api_base
|
| 55 |
+
)
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
# ---------------------------------------------------------------------------
|
| 59 |
+
# AbstractPlugin — 最底层抽象接口
|
| 60 |
+
# ---------------------------------------------------------------------------
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class AbstractPlugin(ABC):
|
| 64 |
+
"""
|
| 65 |
+
各 type(如 claude、kimi)需实现此接口并注册。
|
| 66 |
+
若站点基于 Cookie + SSE,推荐直接继承 BaseSitePlugin 而非此类。
|
| 67 |
+
"""
|
| 68 |
+
|
| 69 |
+
def __init__(self) -> None:
|
| 70 |
+
self._session_state: dict[str, dict[str, Any]] = {}
|
| 71 |
+
|
| 72 |
+
type_name: str
|
| 73 |
+
|
| 74 |
+
async def create_page(
|
| 75 |
+
self, context: BrowserContext, reuse_page: Page | None = None
|
| 76 |
+
) -> Page:
|
| 77 |
+
raise NotImplementedError
|
| 78 |
+
|
| 79 |
+
async def apply_auth(
|
| 80 |
+
self,
|
| 81 |
+
context: BrowserContext,
|
| 82 |
+
page: Page,
|
| 83 |
+
auth: dict[str, Any],
|
| 84 |
+
*,
|
| 85 |
+
reload: bool = True,
|
| 86 |
+
**kwargs: Any,
|
| 87 |
+
) -> None:
|
| 88 |
+
raise NotImplementedError
|
| 89 |
+
|
| 90 |
+
async def ensure_request_ready(
|
| 91 |
+
self,
|
| 92 |
+
context: BrowserContext,
|
| 93 |
+
page: Page,
|
| 94 |
+
*,
|
| 95 |
+
request_id: str = "",
|
| 96 |
+
session_id: str | None = None,
|
| 97 |
+
phase: str = "",
|
| 98 |
+
account_id: str = "",
|
| 99 |
+
) -> None:
|
| 100 |
+
del context, page, request_id, session_id, phase, account_id
|
| 101 |
+
|
| 102 |
+
async def create_conversation(
|
| 103 |
+
self,
|
| 104 |
+
context: BrowserContext,
|
| 105 |
+
page: Page,
|
| 106 |
+
**kwargs: Any,
|
| 107 |
+
) -> str | None:
|
| 108 |
+
raise NotImplementedError
|
| 109 |
+
|
| 110 |
+
async def stream_completion(
|
| 111 |
+
self,
|
| 112 |
+
context: BrowserContext,
|
| 113 |
+
page: Page,
|
| 114 |
+
session_id: str,
|
| 115 |
+
message: str,
|
| 116 |
+
**kwargs: Any,
|
| 117 |
+
) -> AsyncIterator[str]:
|
| 118 |
+
if False:
|
| 119 |
+
yield # 使抽象方法为 async generator,与子类一致,便于 async for 迭代
|
| 120 |
+
raise NotImplementedError
|
| 121 |
+
|
| 122 |
+
def parse_session_id(self, messages: list[dict[str, Any]]) -> str | None:
|
| 123 |
+
return None
|
| 124 |
+
|
| 125 |
+
def is_stream_end_event(self, payload: str) -> bool:
|
| 126 |
+
"""判断某条流式 payload 是否表示本轮响应已正常结束。默认不识别。"""
|
| 127 |
+
return False
|
| 128 |
+
|
| 129 |
+
def has_session(self, session_id: str) -> bool:
|
| 130 |
+
return session_id in self._session_state
|
| 131 |
+
|
| 132 |
+
def drop_session(self, session_id: str) -> None:
|
| 133 |
+
self._session_state.pop(session_id, None)
|
| 134 |
+
|
| 135 |
+
def drop_sessions(self, session_ids: list[str] | set[str]) -> None:
|
| 136 |
+
for session_id in session_ids:
|
| 137 |
+
self._session_state.pop(session_id, None)
|
| 138 |
+
|
| 139 |
+
def model_mapping(self) -> dict[str, str] | None:
|
| 140 |
+
"""子类可覆盖;BaseSitePlugin 会从 config_section 的 model_mapping 读取。"""
|
| 141 |
+
return None
|
| 142 |
+
|
| 143 |
+
def normalized_model_mapping(self) -> dict[str, str]:
|
| 144 |
+
mapping = self.model_mapping()
|
| 145 |
+
if not isinstance(mapping, dict) or not mapping:
|
| 146 |
+
raise ValueError("model_mapping is not implemented")
|
| 147 |
+
normalized: dict[str, str] = {}
|
| 148 |
+
for public_model, upstream_model in mapping.items():
|
| 149 |
+
public_id = str(public_model or "").strip()
|
| 150 |
+
upstream_id = str(upstream_model or "").strip()
|
| 151 |
+
if public_id and upstream_id:
|
| 152 |
+
normalized[public_id] = upstream_id
|
| 153 |
+
if not normalized:
|
| 154 |
+
raise ValueError("model_mapping is not implemented")
|
| 155 |
+
return normalized
|
| 156 |
+
|
| 157 |
+
def listed_model_mapping(self) -> dict[str, str]:
|
| 158 |
+
return self.normalized_model_mapping()
|
| 159 |
+
|
| 160 |
+
def default_public_model(self) -> str:
|
| 161 |
+
listed = self.listed_model_mapping()
|
| 162 |
+
if listed:
|
| 163 |
+
return next(iter(listed))
|
| 164 |
+
return next(iter(self.normalized_model_mapping()))
|
| 165 |
+
|
| 166 |
+
def resolve_model(self, model: str | None) -> ResolvedModel:
|
| 167 |
+
mapping = self.normalized_model_mapping()
|
| 168 |
+
requested = str(model or "").strip()
|
| 169 |
+
if not requested:
|
| 170 |
+
default_public = self.default_public_model()
|
| 171 |
+
return ResolvedModel(
|
| 172 |
+
public_model=default_public,
|
| 173 |
+
upstream_model=mapping[default_public],
|
| 174 |
+
)
|
| 175 |
+
if requested in mapping:
|
| 176 |
+
return ResolvedModel(
|
| 177 |
+
public_model=requested,
|
| 178 |
+
upstream_model=mapping[requested],
|
| 179 |
+
)
|
| 180 |
+
for public_model, upstream_model in mapping.items():
|
| 181 |
+
if requested == upstream_model:
|
| 182 |
+
return ResolvedModel(
|
| 183 |
+
public_model=public_model,
|
| 184 |
+
upstream_model=upstream_model,
|
| 185 |
+
)
|
| 186 |
+
supported = ", ".join(mapping.keys())
|
| 187 |
+
raise ValueError(f"Unknown model: {requested}. Supported models: {supported}")
|
| 188 |
+
|
| 189 |
+
def on_http_error(self, message: str, headers: dict[str, str] | None) -> int | None:
|
| 190 |
+
return None
|
| 191 |
+
|
| 192 |
+
def stream_transport(self) -> str:
|
| 193 |
+
return "page_fetch"
|
| 194 |
+
|
| 195 |
+
def stream_transport_options(
|
| 196 |
+
self,
|
| 197 |
+
context: BrowserContext,
|
| 198 |
+
page: Page,
|
| 199 |
+
session_id: str,
|
| 200 |
+
state: dict[str, Any],
|
| 201 |
+
**kwargs: Any,
|
| 202 |
+
) -> dict[str, Any]:
|
| 203 |
+
del context, page, session_id, state
|
| 204 |
+
options: dict[str, Any] = {}
|
| 205 |
+
proxy_url = str(kwargs.get("proxy_url") or "").strip()
|
| 206 |
+
proxy_auth = kwargs.get("proxy_auth")
|
| 207 |
+
if proxy_url:
|
| 208 |
+
options["proxy_url"] = proxy_url
|
| 209 |
+
if isinstance(proxy_auth, tuple) and len(proxy_auth) == 2:
|
| 210 |
+
options["proxy_auth"] = proxy_auth
|
| 211 |
+
return options
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
# ---------------------------------------------------------------------------
|
| 215 |
+
# BaseSitePlugin — Cookie + SSE 站点的通用编排
|
| 216 |
+
# ---------------------------------------------------------------------------
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
class BaseSitePlugin(AbstractPlugin):
|
| 220 |
+
"""
|
| 221 |
+
Cookie 认证 + SSE 流式站点的公共基类。
|
| 222 |
+
|
| 223 |
+
插件开发者继承此类后,只需:
|
| 224 |
+
1. 声明 site = SiteConfig(...) — 站点配置
|
| 225 |
+
2. 实现 fetch_site_context() — 获取站点上下文(如 org/user 信息)
|
| 226 |
+
3. 实现 create_session() — 调用站点 API 创建会话
|
| 227 |
+
4. 实现 build_completion_url/body() — 拼补全请求的 URL 与 body
|
| 228 |
+
5. 实现 parse_stream_event() — 解析单条流式事件(如 SSE data)
|
| 229 |
+
|
| 230 |
+
create_page / apply_auth / create_conversation / stream_completion
|
| 231 |
+
均由基类自动编排,无需重写。
|
| 232 |
+
"""
|
| 233 |
+
|
| 234 |
+
site: SiteConfig # 子类必须赋值
|
| 235 |
+
|
| 236 |
+
# ---- 从 config.yaml 读取的 URL 属性(config_section 有值时覆盖默认) ----
|
| 237 |
+
|
| 238 |
+
@property
|
| 239 |
+
def start_url(self) -> str:
|
| 240 |
+
if self.site.config_section:
|
| 241 |
+
url = get(self.site.config_section, "start_url")
|
| 242 |
+
if url:
|
| 243 |
+
return str(url).strip()
|
| 244 |
+
return self.site.start_url
|
| 245 |
+
|
| 246 |
+
@property
|
| 247 |
+
def api_base(self) -> str:
|
| 248 |
+
if self.site.config_section:
|
| 249 |
+
base = get(self.site.config_section, "api_base")
|
| 250 |
+
if base:
|
| 251 |
+
return str(base).strip()
|
| 252 |
+
return self.site.api_base
|
| 253 |
+
|
| 254 |
+
def model_mapping(self) -> dict[str, str] | None:
|
| 255 |
+
"""从 config 的 config_section.model_mapping 读取;未配置时返回 None。"""
|
| 256 |
+
if self.site.config_section:
|
| 257 |
+
m = get(self.site.config_section, "model_mapping")
|
| 258 |
+
if isinstance(m, dict) and m:
|
| 259 |
+
return {str(k): str(v) for k, v in m.items()}
|
| 260 |
+
return None
|
| 261 |
+
|
| 262 |
+
# ---- 基类全自动实现,子类无需碰 ----
|
| 263 |
+
|
| 264 |
+
async def create_page(
|
| 265 |
+
self,
|
| 266 |
+
context: BrowserContext,
|
| 267 |
+
reuse_page: Page | None = None,
|
| 268 |
+
) -> Page:
|
| 269 |
+
return await create_page_for_site(
|
| 270 |
+
context, self.start_url, reuse_page=reuse_page
|
| 271 |
+
)
|
| 272 |
+
|
| 273 |
+
async def apply_auth(
|
| 274 |
+
self,
|
| 275 |
+
context: BrowserContext,
|
| 276 |
+
page: Page,
|
| 277 |
+
auth: dict[str, Any],
|
| 278 |
+
*,
|
| 279 |
+
reload: bool = True,
|
| 280 |
+
) -> None:
|
| 281 |
+
await apply_cookie_auth(
|
| 282 |
+
context,
|
| 283 |
+
page,
|
| 284 |
+
auth,
|
| 285 |
+
self.site.cookie_name,
|
| 286 |
+
self.site.auth_keys,
|
| 287 |
+
self.site.cookie_domain,
|
| 288 |
+
reload=reload,
|
| 289 |
+
)
|
| 290 |
+
|
| 291 |
+
async def create_conversation(
|
| 292 |
+
self,
|
| 293 |
+
context: BrowserContext,
|
| 294 |
+
page: Page,
|
| 295 |
+
**kwargs: Any,
|
| 296 |
+
) -> str | None:
|
| 297 |
+
extra_kwargs = dict(kwargs)
|
| 298 |
+
request_id = str(extra_kwargs.pop("request_id", "") or "")
|
| 299 |
+
# 调用子类获取站点上下文
|
| 300 |
+
site_context = await self.fetch_site_context(
|
| 301 |
+
context,
|
| 302 |
+
page,
|
| 303 |
+
request_id=request_id,
|
| 304 |
+
)
|
| 305 |
+
if site_context is None:
|
| 306 |
+
logger.warning(
|
| 307 |
+
"[%s] fetch_site_context 返回 None,请确认已登录", self.type_name
|
| 308 |
+
)
|
| 309 |
+
return None
|
| 310 |
+
# 通过站点上下文创建会话
|
| 311 |
+
conv_id = await self.create_session(
|
| 312 |
+
context,
|
| 313 |
+
page,
|
| 314 |
+
site_context,
|
| 315 |
+
request_id=request_id,
|
| 316 |
+
**extra_kwargs,
|
| 317 |
+
)
|
| 318 |
+
if conv_id is None:
|
| 319 |
+
return None
|
| 320 |
+
state: dict[str, Any] = {"site_context": site_context}
|
| 321 |
+
if kwargs.get("timezone") is not None:
|
| 322 |
+
state["timezone"] = kwargs["timezone"]
|
| 323 |
+
public_model = str(kwargs.get("public_model") or "").strip()
|
| 324 |
+
if public_model:
|
| 325 |
+
state["public_model"] = public_model
|
| 326 |
+
upstream_model = str(kwargs.get("upstream_model") or "").strip()
|
| 327 |
+
if upstream_model:
|
| 328 |
+
state["upstream_model"] = upstream_model
|
| 329 |
+
self._session_state[conv_id] = state
|
| 330 |
+
logger.info(
|
| 331 |
+
"[%s] create_conversation done conv_id=%s sessions=%s",
|
| 332 |
+
self.type_name,
|
| 333 |
+
conv_id,
|
| 334 |
+
list(self._session_state.keys()),
|
| 335 |
+
)
|
| 336 |
+
return conv_id
|
| 337 |
+
|
| 338 |
+
async def stream_completion(
|
| 339 |
+
self,
|
| 340 |
+
context: BrowserContext,
|
| 341 |
+
page: Page,
|
| 342 |
+
session_id: str,
|
| 343 |
+
message: str,
|
| 344 |
+
**kwargs: Any,
|
| 345 |
+
) -> AsyncIterator[str]:
|
| 346 |
+
state = self._session_state.get(session_id)
|
| 347 |
+
if not state:
|
| 348 |
+
raise RuntimeError(f"未知会话 ID: {session_id}")
|
| 349 |
+
|
| 350 |
+
request_id: str = kwargs.get("request_id", "")
|
| 351 |
+
url = self.build_completion_url(session_id, state)
|
| 352 |
+
attachments = list(kwargs.get("attachments") or [])
|
| 353 |
+
prepared_attachments = await self.prepare_attachments(
|
| 354 |
+
context,
|
| 355 |
+
page,
|
| 356 |
+
session_id,
|
| 357 |
+
state,
|
| 358 |
+
attachments,
|
| 359 |
+
request_id=request_id,
|
| 360 |
+
)
|
| 361 |
+
body = self.build_completion_body(
|
| 362 |
+
message,
|
| 363 |
+
session_id,
|
| 364 |
+
state,
|
| 365 |
+
prepared_attachments,
|
| 366 |
+
)
|
| 367 |
+
body_json = json.dumps(body)
|
| 368 |
+
|
| 369 |
+
logger.info(
|
| 370 |
+
"[%s] stream_completion session_id=%s url=%s",
|
| 371 |
+
self.type_name,
|
| 372 |
+
session_id,
|
| 373 |
+
url,
|
| 374 |
+
)
|
| 375 |
+
|
| 376 |
+
out_message_ids: list[str] = []
|
| 377 |
+
transport_options = self.stream_transport_options(
|
| 378 |
+
context,
|
| 379 |
+
page,
|
| 380 |
+
session_id,
|
| 381 |
+
state,
|
| 382 |
+
request_id=request_id,
|
| 383 |
+
attachments=attachments,
|
| 384 |
+
proxy_url=kwargs.get("proxy_url"),
|
| 385 |
+
proxy_auth=kwargs.get("proxy_auth"),
|
| 386 |
+
)
|
| 387 |
+
|
| 388 |
+
async for text in stream_completion_via_sse(
|
| 389 |
+
context,
|
| 390 |
+
page,
|
| 391 |
+
url,
|
| 392 |
+
body_json,
|
| 393 |
+
self.parse_stream_event,
|
| 394 |
+
request_id,
|
| 395 |
+
on_http_error=self.on_http_error,
|
| 396 |
+
is_terminal_event=self.is_stream_end_event,
|
| 397 |
+
collect_message_id=out_message_ids,
|
| 398 |
+
transport=self.stream_transport(),
|
| 399 |
+
transport_options=transport_options,
|
| 400 |
+
):
|
| 401 |
+
yield text
|
| 402 |
+
|
| 403 |
+
if out_message_ids and session_id in self._session_state:
|
| 404 |
+
self.on_stream_completion_finished(session_id, out_message_ids)
|
| 405 |
+
|
| 406 |
+
# ---- 子类必须实现的 hook ----
|
| 407 |
+
|
| 408 |
+
@abstractmethod
|
| 409 |
+
async def fetch_site_context(
|
| 410 |
+
self,
|
| 411 |
+
context: BrowserContext,
|
| 412 |
+
page: Page,
|
| 413 |
+
request_id: str = "",
|
| 414 |
+
) -> dict[str, Any] | None:
|
| 415 |
+
"""获取站点上下文信息(如 org_uuid、user_id 等),失败返回 None。"""
|
| 416 |
+
del request_id
|
| 417 |
+
...
|
| 418 |
+
|
| 419 |
+
@abstractmethod
|
| 420 |
+
async def create_session(
|
| 421 |
+
self,
|
| 422 |
+
context: BrowserContext,
|
| 423 |
+
page: Page,
|
| 424 |
+
site_context: dict[str, Any],
|
| 425 |
+
**kwargs: Any,
|
| 426 |
+
) -> str | None:
|
| 427 |
+
"""调用站点 API 创建会话,返回会话 ID,失败返回 None。"""
|
| 428 |
+
...
|
| 429 |
+
|
| 430 |
+
@abstractmethod
|
| 431 |
+
def build_completion_url(self, session_id: str, state: dict[str, Any]) -> str:
|
| 432 |
+
"""根据会话状态拼出补全请求的完整 URL。"""
|
| 433 |
+
...
|
| 434 |
+
|
| 435 |
+
@abstractmethod
|
| 436 |
+
def build_completion_body(
|
| 437 |
+
self,
|
| 438 |
+
message: str,
|
| 439 |
+
session_id: str,
|
| 440 |
+
state: dict[str, Any],
|
| 441 |
+
prepared_attachments: dict[str, Any] | None = None,
|
| 442 |
+
) -> dict[str, Any]:
|
| 443 |
+
"""构建补全请求体,返回 dict(基类负责 json.dumps)。"""
|
| 444 |
+
...
|
| 445 |
+
|
| 446 |
+
@abstractmethod
|
| 447 |
+
def parse_stream_event(
|
| 448 |
+
self,
|
| 449 |
+
payload: str,
|
| 450 |
+
) -> tuple[list[str], str | None, str | None]:
|
| 451 |
+
"""
|
| 452 |
+
解析单条流式事件 payload(如 SSE data 行)。
|
| 453 |
+
返回 (texts, message_id, error_message)。
|
| 454 |
+
"""
|
| 455 |
+
...
|
| 456 |
+
|
| 457 |
+
# ---- 子类可选覆盖的 hook(有合理默认值) ----
|
| 458 |
+
|
| 459 |
+
def on_stream_completion_finished(
|
| 460 |
+
self,
|
| 461 |
+
session_id: str,
|
| 462 |
+
message_ids: list[str],
|
| 463 |
+
) -> None:
|
| 464 |
+
"""Hook:流式补全结束后调用,子类可按需用 message_ids 更新会话 state(如记续写用的父消息 id)。"""
|
| 465 |
+
|
| 466 |
+
async def prepare_attachments(
|
| 467 |
+
self,
|
| 468 |
+
context: BrowserContext,
|
| 469 |
+
page: Page,
|
| 470 |
+
session_id: str,
|
| 471 |
+
state: dict[str, Any],
|
| 472 |
+
attachments: list[InputAttachment],
|
| 473 |
+
request_id: str = "",
|
| 474 |
+
) -> dict[str, Any]:
|
| 475 |
+
del context, page, session_id, state, attachments, request_id
|
| 476 |
+
return {}
|
| 477 |
+
|
| 478 |
+
|
| 479 |
+
# ---------------------------------------------------------------------------
|
| 480 |
+
# PluginRegistry — 全局注册表
|
| 481 |
+
# ---------------------------------------------------------------------------
|
| 482 |
+
|
| 483 |
+
|
| 484 |
+
class PluginRegistry:
|
| 485 |
+
"""全局插件注册表:type_name -> AbstractPlugin。"""
|
| 486 |
+
|
| 487 |
+
_plugins: dict[str, AbstractPlugin] = {}
|
| 488 |
+
|
| 489 |
+
@classmethod
|
| 490 |
+
def register(cls, plugin: AbstractPlugin) -> None:
|
| 491 |
+
cls._plugins[plugin.type_name] = plugin
|
| 492 |
+
|
| 493 |
+
@classmethod
|
| 494 |
+
def get(cls, type_name: str) -> AbstractPlugin | None:
|
| 495 |
+
return cls._plugins.get(type_name)
|
| 496 |
+
|
| 497 |
+
@classmethod
|
| 498 |
+
def resolve_model(cls, type_name: str, model: str | None) -> ResolvedModel:
|
| 499 |
+
plugin = cls.get(type_name)
|
| 500 |
+
if plugin is None:
|
| 501 |
+
raise ValueError(f"Unknown provider: {type_name}")
|
| 502 |
+
return plugin.resolve_model(model)
|
| 503 |
+
|
| 504 |
+
@classmethod
|
| 505 |
+
def model_metadata(cls, type_name: str) -> dict[str, Any]:
|
| 506 |
+
plugin = cls.get(type_name)
|
| 507 |
+
if plugin is None:
|
| 508 |
+
raise ValueError(f"Unknown provider: {type_name}")
|
| 509 |
+
mapping = plugin.listed_model_mapping()
|
| 510 |
+
return {
|
| 511 |
+
"provider": type_name,
|
| 512 |
+
"public_models": list(mapping.keys()),
|
| 513 |
+
"model_mapping": mapping,
|
| 514 |
+
"default_model": plugin.default_public_model(),
|
| 515 |
+
}
|
| 516 |
+
|
| 517 |
+
@classmethod
|
| 518 |
+
def all_types(cls) -> list[str]:
|
| 519 |
+
return list(cls._plugins.keys())
|
core/plugin/claude.py
ADDED
|
@@ -0,0 +1,756 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Claude 插件:仅实现站点特有的上下文获取、会话创建、请求体构建、SSE 解析和限流处理。
|
| 3 |
+
其余编排逻辑(create_page / apply_auth / stream_completion 流程)全部由 BaseSitePlugin 完成。
|
| 4 |
+
调试时可在 config.yaml 的 claude.start_url、claude.api_base 指向 mock。
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import datetime
|
| 8 |
+
import json
|
| 9 |
+
import logging
|
| 10 |
+
import re
|
| 11 |
+
import time
|
| 12 |
+
from asyncio import Lock
|
| 13 |
+
from typing import Any
|
| 14 |
+
from urllib.parse import urlparse
|
| 15 |
+
|
| 16 |
+
from playwright.async_api import BrowserContext, Page
|
| 17 |
+
|
| 18 |
+
from core.api.schemas import InputAttachment
|
| 19 |
+
from core.constants import TIMEZONE
|
| 20 |
+
from core.plugin.base import BaseSitePlugin, PluginRegistry, SiteConfig
|
| 21 |
+
from core.plugin.errors import BrowserResourceInvalidError
|
| 22 |
+
from core.plugin.helpers import (
|
| 23 |
+
_classify_browser_resource_error,
|
| 24 |
+
clear_cookies_for_domain,
|
| 25 |
+
clear_page_storage_for_switch,
|
| 26 |
+
request_json_via_context_request,
|
| 27 |
+
safe_page_reload,
|
| 28 |
+
upload_file_via_context_request,
|
| 29 |
+
)
|
| 30 |
+
|
| 31 |
+
logger = logging.getLogger(__name__)
|
| 32 |
+
|
| 33 |
+
# Probe cache: skip redundant ensure_request_ready probes within this window.
|
| 34 |
+
_PROBE_CACHE_TTL_SECONDS = 60.0
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def _truncate_url_for_log(value: str, limit: int = 200) -> str:
|
| 38 |
+
if len(value) <= limit:
|
| 39 |
+
return value
|
| 40 |
+
return value[:limit] + "..."
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def _safe_page_url(page: Page) -> str:
|
| 44 |
+
try:
|
| 45 |
+
return page.url or ""
|
| 46 |
+
except Exception:
|
| 47 |
+
return ""
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
# ---------------------------------------------------------------------------
|
| 51 |
+
# 站点特有:请求体 & SSE 解析
|
| 52 |
+
# ---------------------------------------------------------------------------
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def _is_thinking_model(public_model: str) -> bool:
|
| 56 |
+
"""Any model ending with -thinking enables extended thinking (paprika_mode)."""
|
| 57 |
+
return public_model.endswith("-thinking")
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _base_upstream_model(public_model: str) -> str:
|
| 61 |
+
"""Strip -thinking suffix to get the upstream model ID for Claude Web."""
|
| 62 |
+
return public_model.removesuffix("-thinking")
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def _default_completion_body(
|
| 66 |
+
message: str,
|
| 67 |
+
*,
|
| 68 |
+
is_follow_up: bool = False,
|
| 69 |
+
timezone: str = TIMEZONE,
|
| 70 |
+
public_model: str = "",
|
| 71 |
+
) -> dict[str, Any]:
|
| 72 |
+
"""构建 Claude completion 请求体。续写时不带 create_conversation_params,否则 API 返回 400。"""
|
| 73 |
+
body: dict[str, Any] = {
|
| 74 |
+
"prompt": message,
|
| 75 |
+
"timezone": timezone,
|
| 76 |
+
"personalized_styles": [
|
| 77 |
+
{
|
| 78 |
+
"type": "default",
|
| 79 |
+
"key": "Default",
|
| 80 |
+
"name": "Normal",
|
| 81 |
+
"nameKey": "normal_style_name",
|
| 82 |
+
"prompt": "Normal\n",
|
| 83 |
+
"summary": "Default responses from Claude",
|
| 84 |
+
"summaryKey": "normal_style_summary",
|
| 85 |
+
"isDefault": True,
|
| 86 |
+
}
|
| 87 |
+
],
|
| 88 |
+
"locale": "en-US",
|
| 89 |
+
"tools": [
|
| 90 |
+
{"type": "web_search_v0", "name": "web_search"},
|
| 91 |
+
{"type": "artifacts_v0", "name": "artifacts"},
|
| 92 |
+
{"type": "repl_v0", "name": "repl"},
|
| 93 |
+
{"type": "widget", "name": "weather_fetch"},
|
| 94 |
+
{"type": "widget", "name": "recipe_display_v0"},
|
| 95 |
+
{"type": "widget", "name": "places_map_display_v0"},
|
| 96 |
+
{"type": "widget", "name": "message_compose_v1"},
|
| 97 |
+
{"type": "widget", "name": "ask_user_input_v0"},
|
| 98 |
+
{"type": "widget", "name": "places_search"},
|
| 99 |
+
{"type": "widget", "name": "fetch_sports_data"},
|
| 100 |
+
],
|
| 101 |
+
"attachments": [],
|
| 102 |
+
"files": [],
|
| 103 |
+
"sync_sources": [],
|
| 104 |
+
"rendering_mode": "messages",
|
| 105 |
+
}
|
| 106 |
+
if _is_thinking_model(public_model):
|
| 107 |
+
body["model"] = _base_upstream_model(public_model)
|
| 108 |
+
if not is_follow_up:
|
| 109 |
+
body["create_conversation_params"] = {
|
| 110 |
+
"name": "",
|
| 111 |
+
"include_conversation_preferences": True,
|
| 112 |
+
"is_temporary": False,
|
| 113 |
+
}
|
| 114 |
+
if _is_thinking_model(public_model):
|
| 115 |
+
body["create_conversation_params"]["paprika_mode"] = "extended"
|
| 116 |
+
return body
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def _parse_one_sse_event(payload: str) -> tuple[list[str], str | None, str | None]:
|
| 120 |
+
"""解析单条 Claude SSE data 行,返回 (texts, message_id, error)。"""
|
| 121 |
+
result: list[str] = []
|
| 122 |
+
message_id: str | None = None
|
| 123 |
+
error_message: str | None = None
|
| 124 |
+
try:
|
| 125 |
+
obj = json.loads(payload)
|
| 126 |
+
if not isinstance(obj, dict):
|
| 127 |
+
return (result, message_id, error_message)
|
| 128 |
+
kind = obj.get("type")
|
| 129 |
+
if kind == "error":
|
| 130 |
+
err = obj.get("error") or {}
|
| 131 |
+
error_message = err.get("message") or err.get("type") or "Unknown error"
|
| 132 |
+
return (result, message_id, error_message)
|
| 133 |
+
if "text" in obj and obj.get("text"):
|
| 134 |
+
result.append(str(obj["text"]))
|
| 135 |
+
elif kind == "content_block_delta":
|
| 136 |
+
delta = obj.get("delta")
|
| 137 |
+
if isinstance(delta, dict) and "text" in delta:
|
| 138 |
+
result.append(str(delta["text"]))
|
| 139 |
+
elif isinstance(delta, str) and delta:
|
| 140 |
+
result.append(delta)
|
| 141 |
+
elif kind == "message_start":
|
| 142 |
+
msg = obj.get("message")
|
| 143 |
+
if isinstance(msg, dict):
|
| 144 |
+
for key in ("uuid", "id"):
|
| 145 |
+
if msg.get(key):
|
| 146 |
+
message_id = str(msg[key])
|
| 147 |
+
break
|
| 148 |
+
if not message_id:
|
| 149 |
+
mid = (
|
| 150 |
+
obj.get("message_uuid") or obj.get("uuid") or obj.get("message_id")
|
| 151 |
+
)
|
| 152 |
+
if mid:
|
| 153 |
+
message_id = str(mid)
|
| 154 |
+
elif (
|
| 155 |
+
kind
|
| 156 |
+
and kind
|
| 157 |
+
not in (
|
| 158 |
+
"ping",
|
| 159 |
+
"content_block_start",
|
| 160 |
+
"content_block_stop",
|
| 161 |
+
"message_stop",
|
| 162 |
+
"message_delta",
|
| 163 |
+
"message_limit",
|
| 164 |
+
)
|
| 165 |
+
and not result
|
| 166 |
+
):
|
| 167 |
+
logger.debug(
|
| 168 |
+
"SSE 未解析出正文 type=%s payload=%s",
|
| 169 |
+
kind,
|
| 170 |
+
payload[:200] if len(payload) > 200 else payload,
|
| 171 |
+
)
|
| 172 |
+
except json.JSONDecodeError:
|
| 173 |
+
pass
|
| 174 |
+
return (result, message_id, error_message)
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
def _is_terminal_sse_event(payload: str) -> bool:
|
| 178 |
+
"""Claude 正常流结束时会发送 message_stop。"""
|
| 179 |
+
try:
|
| 180 |
+
obj = json.loads(payload)
|
| 181 |
+
except json.JSONDecodeError:
|
| 182 |
+
return False
|
| 183 |
+
return isinstance(obj, dict) and obj.get("type") == "message_stop"
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
# ---------------------------------------------------------------------------
|
| 187 |
+
# ClaudePlugin — 只需声明配置 + 实现 5 个 hook
|
| 188 |
+
# ---------------------------------------------------------------------------
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
class ClaudePlugin(BaseSitePlugin):
|
| 192 |
+
"""Claude Web2API plugin. auth must include sessionKey."""
|
| 193 |
+
|
| 194 |
+
type_name = "claude"
|
| 195 |
+
DEFAULT_MODEL_MAPPING = {
|
| 196 |
+
"claude-sonnet-4-6": "claude-sonnet-4-6",
|
| 197 |
+
"claude-sonnet-4-5": "claude-sonnet-4-5",
|
| 198 |
+
"claude-sonnet-4-5-thinking": "claude-sonnet-4-5-thinking",
|
| 199 |
+
"claude-sonnet-4-6-thinking": "claude-sonnet-4-6-thinking",
|
| 200 |
+
"claude-haiku-4-5": "claude-haiku-4-5",
|
| 201 |
+
"claude-haiku-4-5-thinking": "claude-haiku-4-5-thinking",
|
| 202 |
+
"claude-opus-4-6": "claude-opus-4-6",
|
| 203 |
+
"claude-opus-4-6-thinking": "claude-opus-4-6-thinking",
|
| 204 |
+
}
|
| 205 |
+
# Models that require a Claude Pro subscription.
|
| 206 |
+
PRO_MODELS = frozenset({
|
| 207 |
+
"claude-haiku-4-5",
|
| 208 |
+
"claude-haiku-4-5-thinking",
|
| 209 |
+
"claude-opus-4-6",
|
| 210 |
+
"claude-opus-4-6-thinking",
|
| 211 |
+
})
|
| 212 |
+
MODEL_ALIASES = {
|
| 213 |
+
"s4": "claude-sonnet-4-6",
|
| 214 |
+
# dot-notation aliases (e.g. 4.6 / 4.5) → canonical dash form
|
| 215 |
+
"claude-sonnet-4.6": "claude-sonnet-4-6",
|
| 216 |
+
"claude-sonnet-4.5": "claude-sonnet-4-5",
|
| 217 |
+
"claude-opus-4.6": "claude-opus-4-6",
|
| 218 |
+
"claude-haiku-4.5": "claude-haiku-4-5",
|
| 219 |
+
# thinking variants
|
| 220 |
+
"claude-sonnet-4.6-thinking": "claude-sonnet-4-6-thinking",
|
| 221 |
+
"claude-sonnet-4.5-thinking": "claude-sonnet-4-5-thinking",
|
| 222 |
+
"claude-opus-4.6-thinking": "claude-opus-4-6-thinking",
|
| 223 |
+
"claude-haiku-4.5-thinking": "claude-haiku-4-5-thinking",
|
| 224 |
+
}
|
| 225 |
+
|
| 226 |
+
site = SiteConfig(
|
| 227 |
+
start_url="https://claude.ai/login",
|
| 228 |
+
api_base="https://claude.ai/api",
|
| 229 |
+
cookie_name="sessionKey",
|
| 230 |
+
cookie_domain=".claude.ai",
|
| 231 |
+
auth_keys=["sessionKey", "session_key"],
|
| 232 |
+
config_section="claude",
|
| 233 |
+
)
|
| 234 |
+
|
| 235 |
+
def __init__(self) -> None:
|
| 236 |
+
super().__init__()
|
| 237 |
+
# Per-page probe cache: page id -> last successful probe timestamp
|
| 238 |
+
self._probe_ok_at: dict[int, float] = {}
|
| 239 |
+
# Per-page navigation lock: prevents concurrent page.goto/reload
|
| 240 |
+
self._nav_locks: dict[int, Lock] = {}
|
| 241 |
+
# Per-page site_context cache: page id -> (context_dict, timestamp)
|
| 242 |
+
self._site_context_cache: dict[int, tuple[dict[str, Any], float]] = {}
|
| 243 |
+
|
| 244 |
+
_SITE_CONTEXT_TTL = 300.0 # 5 minutes
|
| 245 |
+
|
| 246 |
+
def model_mapping(self) -> dict[str, str] | None:
|
| 247 |
+
configured = super().model_mapping() or {}
|
| 248 |
+
mapping = dict(self.DEFAULT_MODEL_MAPPING)
|
| 249 |
+
mapping.update(configured)
|
| 250 |
+
for alias, upstream_model in self.MODEL_ALIASES.items():
|
| 251 |
+
mapping.setdefault(alias, upstream_model)
|
| 252 |
+
return mapping
|
| 253 |
+
|
| 254 |
+
def listed_model_mapping(self) -> dict[str, str]:
|
| 255 |
+
configured = super().model_mapping() or {}
|
| 256 |
+
mapping = dict(self.DEFAULT_MODEL_MAPPING)
|
| 257 |
+
mapping.update(configured)
|
| 258 |
+
for alias in self.MODEL_ALIASES:
|
| 259 |
+
mapping.pop(alias, None)
|
| 260 |
+
return mapping
|
| 261 |
+
|
| 262 |
+
async def apply_auth(
|
| 263 |
+
self,
|
| 264 |
+
context: BrowserContext,
|
| 265 |
+
page: Page,
|
| 266 |
+
auth: dict[str, Any],
|
| 267 |
+
*,
|
| 268 |
+
reload: bool = True,
|
| 269 |
+
) -> None:
|
| 270 |
+
await clear_cookies_for_domain(context, self.site.cookie_domain)
|
| 271 |
+
await clear_page_storage_for_switch(page)
|
| 272 |
+
await super().apply_auth(context, page, auth, reload=False)
|
| 273 |
+
if reload:
|
| 274 |
+
await safe_page_reload(page, url=self.start_url)
|
| 275 |
+
|
| 276 |
+
def _is_claude_domain(self, url: str) -> bool:
|
| 277 |
+
host = (urlparse(url).hostname or "").lower().lstrip(".")
|
| 278 |
+
if not host:
|
| 279 |
+
return False
|
| 280 |
+
allowed_hosts = {"claude.ai", "claude.com"}
|
| 281 |
+
for configured_url in (self.start_url, self.api_base):
|
| 282 |
+
configured_host = (urlparse(configured_url).hostname or "").lower().lstrip(".")
|
| 283 |
+
if configured_host:
|
| 284 |
+
allowed_hosts.add(configured_host)
|
| 285 |
+
return any(host == allowed or host.endswith(f".{allowed}") for allowed in allowed_hosts)
|
| 286 |
+
|
| 287 |
+
def _suspicious_page_reason(self, url: str) -> str | None:
|
| 288 |
+
if not url:
|
| 289 |
+
return "empty_page_url"
|
| 290 |
+
parsed = urlparse(url)
|
| 291 |
+
if not parsed.scheme or not parsed.netloc:
|
| 292 |
+
return "invalid_page_url"
|
| 293 |
+
if not self._is_claude_domain(url):
|
| 294 |
+
return "non_claude_domain"
|
| 295 |
+
path = parsed.path or "/"
|
| 296 |
+
if path == "/new" or path.startswith("/new/"):
|
| 297 |
+
return "new_chat_page"
|
| 298 |
+
if path in {"/logout", "/auth", "/signed-out"}:
|
| 299 |
+
return "logout_page"
|
| 300 |
+
if path.startswith("/signup"):
|
| 301 |
+
return "signup_page"
|
| 302 |
+
if path == "/app-unavailable-in-region" or path.startswith(
|
| 303 |
+
"/app-unavailable-in-region/"
|
| 304 |
+
):
|
| 305 |
+
return "app_unavailable_in_region"
|
| 306 |
+
return None
|
| 307 |
+
|
| 308 |
+
def _is_suspicious_page_url(self, url: str) -> bool:
|
| 309 |
+
return self._suspicious_page_reason(url) is not None
|
| 310 |
+
|
| 311 |
+
async def _probe_request_ready(
|
| 312 |
+
self,
|
| 313 |
+
context: BrowserContext,
|
| 314 |
+
page: Page,
|
| 315 |
+
*,
|
| 316 |
+
request_id: str,
|
| 317 |
+
) -> tuple[bool, str | None]:
|
| 318 |
+
current_url = _safe_page_url(page)
|
| 319 |
+
suspicious_reason = self._suspicious_page_reason(current_url)
|
| 320 |
+
if suspicious_reason is not None:
|
| 321 |
+
logger.warning(
|
| 322 |
+
"[%s] request-ready probe sees suspicious page url request_id=%s reason=%s page.url=%s",
|
| 323 |
+
self.type_name,
|
| 324 |
+
request_id,
|
| 325 |
+
suspicious_reason,
|
| 326 |
+
_truncate_url_for_log(current_url),
|
| 327 |
+
)
|
| 328 |
+
return (False, suspicious_reason)
|
| 329 |
+
try:
|
| 330 |
+
site_context = await self.fetch_site_context(
|
| 331 |
+
context,
|
| 332 |
+
page,
|
| 333 |
+
request_id=request_id,
|
| 334 |
+
)
|
| 335 |
+
except BrowserResourceInvalidError:
|
| 336 |
+
raise
|
| 337 |
+
except Exception as e:
|
| 338 |
+
logger.warning(
|
| 339 |
+
"[%s] request-ready probe failed request_id=%s page.url=%s err=%s",
|
| 340 |
+
self.type_name,
|
| 341 |
+
request_id,
|
| 342 |
+
_truncate_url_for_log(current_url),
|
| 343 |
+
str(e)[:240],
|
| 344 |
+
)
|
| 345 |
+
return (False, f"control_probe_error:{str(e)[:120]}")
|
| 346 |
+
return (site_context is not None, None if site_context is not None else "account_probe_empty")
|
| 347 |
+
|
| 348 |
+
async def ensure_request_ready(
|
| 349 |
+
self,
|
| 350 |
+
context: BrowserContext,
|
| 351 |
+
page: Page,
|
| 352 |
+
*,
|
| 353 |
+
request_id: str = "",
|
| 354 |
+
session_id: str | None = None,
|
| 355 |
+
phase: str = "",
|
| 356 |
+
account_id: str = "",
|
| 357 |
+
) -> None:
|
| 358 |
+
initial_url = _safe_page_url(page)
|
| 359 |
+
current_url = initial_url
|
| 360 |
+
probe_request_id = request_id or f"ready:{phase or 'request'}"
|
| 361 |
+
action = "none"
|
| 362 |
+
probe_before = False
|
| 363 |
+
probe_after = False
|
| 364 |
+
probe_reason: str | None = None
|
| 365 |
+
page_id = id(page)
|
| 366 |
+
|
| 367 |
+
# Fast path (lock-free): page URL is clean and probe succeeded recently.
|
| 368 |
+
suspicious_reason = self._suspicious_page_reason(current_url)
|
| 369 |
+
if suspicious_reason is None:
|
| 370 |
+
last_ok = self._probe_ok_at.get(page_id, 0.0)
|
| 371 |
+
if (time.time() - last_ok) < _PROBE_CACHE_TTL_SECONDS:
|
| 372 |
+
return
|
| 373 |
+
if suspicious_reason == "app_unavailable_in_region":
|
| 374 |
+
raise RuntimeError(
|
| 375 |
+
"Claude page is app-unavailable-in-region; the runtime IP or region cannot reach Claude Web"
|
| 376 |
+
)
|
| 377 |
+
|
| 378 |
+
# Slow path: acquire per-page nav lock to prevent concurrent navigation.
|
| 379 |
+
nav_lock = self._nav_locks.setdefault(page_id, Lock())
|
| 380 |
+
async with nav_lock:
|
| 381 |
+
# Re-check after acquiring lock — another request may have fixed the page.
|
| 382 |
+
current_url = _safe_page_url(page)
|
| 383 |
+
suspicious_reason = self._suspicious_page_reason(current_url)
|
| 384 |
+
if suspicious_reason is None:
|
| 385 |
+
last_ok = self._probe_ok_at.get(page_id, 0.0)
|
| 386 |
+
if (time.time() - last_ok) < _PROBE_CACHE_TTL_SECONDS:
|
| 387 |
+
return
|
| 388 |
+
if suspicious_reason == "app_unavailable_in_region":
|
| 389 |
+
raise RuntimeError(
|
| 390 |
+
"Claude page is app-unavailable-in-region; the runtime IP or region cannot reach Claude Web"
|
| 391 |
+
)
|
| 392 |
+
|
| 393 |
+
try:
|
| 394 |
+
if suspicious_reason is not None:
|
| 395 |
+
action = "goto"
|
| 396 |
+
try:
|
| 397 |
+
await safe_page_reload(page, url=self.start_url)
|
| 398 |
+
except Exception as e:
|
| 399 |
+
classified = _classify_browser_resource_error(
|
| 400 |
+
e,
|
| 401 |
+
helper_name="claude.ensure_request_ready",
|
| 402 |
+
operation="preflight",
|
| 403 |
+
stage="goto_start_url",
|
| 404 |
+
request_url=self.start_url,
|
| 405 |
+
page=page,
|
| 406 |
+
request_id=request_id or None,
|
| 407 |
+
stream_phase=phase or None,
|
| 408 |
+
)
|
| 409 |
+
if classified is not None:
|
| 410 |
+
raise classified from e
|
| 411 |
+
raise
|
| 412 |
+
current_url = _safe_page_url(page)
|
| 413 |
+
suspicious_reason = self._suspicious_page_reason(current_url)
|
| 414 |
+
if suspicious_reason == "app_unavailable_in_region":
|
| 415 |
+
probe_reason = suspicious_reason
|
| 416 |
+
raise RuntimeError(
|
| 417 |
+
"Claude page is app-unavailable-in-region after goto; the runtime IP or region cannot reach Claude Web"
|
| 418 |
+
)
|
| 419 |
+
|
| 420 |
+
probe_before = self._suspicious_page_reason(current_url) is None
|
| 421 |
+
if probe_before:
|
| 422 |
+
probe_after, probe_reason = await self._probe_request_ready(
|
| 423 |
+
context,
|
| 424 |
+
page,
|
| 425 |
+
request_id=f"{probe_request_id}:initial",
|
| 426 |
+
)
|
| 427 |
+
if probe_after:
|
| 428 |
+
self._probe_ok_at[page_id] = time.time()
|
| 429 |
+
return
|
| 430 |
+
if probe_reason == "app_unavailable_in_region":
|
| 431 |
+
raise RuntimeError(
|
| 432 |
+
"Claude page is app-unavailable-in-region during control probe; the runtime IP or region cannot reach Claude Web"
|
| 433 |
+
)
|
| 434 |
+
else:
|
| 435 |
+
probe_after = False
|
| 436 |
+
probe_reason = suspicious_reason or "suspicious_page_url"
|
| 437 |
+
|
| 438 |
+
action = "reload"
|
| 439 |
+
try:
|
| 440 |
+
await safe_page_reload(page)
|
| 441 |
+
except Exception as e:
|
| 442 |
+
classified = _classify_browser_resource_error(
|
| 443 |
+
e,
|
| 444 |
+
helper_name="claude.ensure_request_ready",
|
| 445 |
+
operation="preflight",
|
| 446 |
+
stage="reload",
|
| 447 |
+
request_url=current_url or self.start_url,
|
| 448 |
+
page=page,
|
| 449 |
+
request_id=request_id or None,
|
| 450 |
+
stream_phase=phase or None,
|
| 451 |
+
)
|
| 452 |
+
if classified is not None:
|
| 453 |
+
raise classified from e
|
| 454 |
+
raise
|
| 455 |
+
current_url = _safe_page_url(page)
|
| 456 |
+
if self._suspicious_page_reason(current_url) == "app_unavailable_in_region":
|
| 457 |
+
probe_reason = "app_unavailable_in_region"
|
| 458 |
+
raise RuntimeError(
|
| 459 |
+
"Claude page is app-unavailable-in-region after reload; the runtime IP or region cannot reach Claude Web"
|
| 460 |
+
)
|
| 461 |
+
probe_after, probe_reason = await self._probe_request_ready(
|
| 462 |
+
context,
|
| 463 |
+
page,
|
| 464 |
+
request_id=f"{probe_request_id}:reload",
|
| 465 |
+
)
|
| 466 |
+
if probe_after:
|
| 467 |
+
self._probe_ok_at[page_id] = time.time()
|
| 468 |
+
return
|
| 469 |
+
if probe_reason == "app_unavailable_in_region":
|
| 470 |
+
raise RuntimeError(
|
| 471 |
+
"Claude page is app-unavailable-in-region after reload probe; the runtime IP or region cannot reach Claude Web"
|
| 472 |
+
)
|
| 473 |
+
|
| 474 |
+
action = "goto"
|
| 475 |
+
try:
|
| 476 |
+
await safe_page_reload(page, url=self.start_url)
|
| 477 |
+
except Exception as e:
|
| 478 |
+
classified = _classify_browser_resource_error(
|
| 479 |
+
e,
|
| 480 |
+
helper_name="claude.ensure_request_ready",
|
| 481 |
+
operation="preflight",
|
| 482 |
+
stage="goto_start_url",
|
| 483 |
+
request_url=self.start_url,
|
| 484 |
+
page=page,
|
| 485 |
+
request_id=request_id or None,
|
| 486 |
+
stream_phase=phase or None,
|
| 487 |
+
)
|
| 488 |
+
if classified is not None:
|
| 489 |
+
raise classified from e
|
| 490 |
+
raise
|
| 491 |
+
current_url = _safe_page_url(page)
|
| 492 |
+
if self._suspicious_page_reason(current_url) == "app_unavailable_in_region":
|
| 493 |
+
probe_reason = "app_unavailable_in_region"
|
| 494 |
+
raise RuntimeError(
|
| 495 |
+
"Claude page is app-unavailable-in-region after page correction; the runtime IP or region cannot reach Claude Web"
|
| 496 |
+
)
|
| 497 |
+
probe_after, probe_reason = await self._probe_request_ready(
|
| 498 |
+
context,
|
| 499 |
+
page,
|
| 500 |
+
request_id=f"{probe_request_id}:goto",
|
| 501 |
+
)
|
| 502 |
+
if not probe_after:
|
| 503 |
+
if probe_reason == "suspicious_page_url":
|
| 504 |
+
raise BrowserResourceInvalidError(
|
| 505 |
+
"Claude request preflight failed after page correction: suspicious_page_url",
|
| 506 |
+
helper_name="claude.ensure_request_ready",
|
| 507 |
+
operation="preflight",
|
| 508 |
+
stage="probe_after_goto",
|
| 509 |
+
resource_hint="page",
|
| 510 |
+
request_url=self.start_url,
|
| 511 |
+
page_url=current_url,
|
| 512 |
+
request_id=request_id or None,
|
| 513 |
+
stream_phase=phase or None,
|
| 514 |
+
)
|
| 515 |
+
raise RuntimeError(
|
| 516 |
+
f"Claude request control probe failed after page correction: {probe_reason or 'unknown'}"
|
| 517 |
+
)
|
| 518 |
+
self._probe_ok_at[page_id] = time.time()
|
| 519 |
+
finally:
|
| 520 |
+
logger.info(
|
| 521 |
+
"[%s] ensure_request_ready phase=%s account=%s session_id=%s action=%s probe_before=%s probe_after=%s probe_reason=%s page.url.before=%s page.url.after=%s",
|
| 522 |
+
self.type_name,
|
| 523 |
+
phase,
|
| 524 |
+
account_id,
|
| 525 |
+
session_id,
|
| 526 |
+
action,
|
| 527 |
+
probe_before,
|
| 528 |
+
probe_after,
|
| 529 |
+
probe_reason,
|
| 530 |
+
_truncate_url_for_log(initial_url),
|
| 531 |
+
_truncate_url_for_log(current_url),
|
| 532 |
+
)
|
| 533 |
+
|
| 534 |
+
# ---- 5 个必须实现的 hook ----
|
| 535 |
+
|
| 536 |
+
async def fetch_site_context(
|
| 537 |
+
self,
|
| 538 |
+
context: BrowserContext,
|
| 539 |
+
page: Page,
|
| 540 |
+
request_id: str = "",
|
| 541 |
+
) -> dict[str, Any] | None:
|
| 542 |
+
page_id = id(page)
|
| 543 |
+
cached = self._site_context_cache.get(page_id)
|
| 544 |
+
if cached is not None:
|
| 545 |
+
ctx, ts = cached
|
| 546 |
+
if (time.time() - ts) < self._SITE_CONTEXT_TTL:
|
| 547 |
+
return ctx
|
| 548 |
+
resp = await request_json_via_context_request(
|
| 549 |
+
context,
|
| 550 |
+
page,
|
| 551 |
+
f"{self.api_base}/account",
|
| 552 |
+
timeout_ms=15000,
|
| 553 |
+
request_id=request_id or "site-context",
|
| 554 |
+
)
|
| 555 |
+
if int(resp.get("status") or 0) != 200:
|
| 556 |
+
text = str(resp.get("text") or "")[:500]
|
| 557 |
+
logger.warning(
|
| 558 |
+
"[%s] fetch_site_context 失败 status=%s url=%s body=%s",
|
| 559 |
+
self.type_name,
|
| 560 |
+
resp.get("status"),
|
| 561 |
+
resp.get("url"),
|
| 562 |
+
text,
|
| 563 |
+
)
|
| 564 |
+
return None
|
| 565 |
+
data = resp.get("json")
|
| 566 |
+
if not isinstance(data, dict):
|
| 567 |
+
logger.warning("[%s] fetch_site_context 返回非 JSON", self.type_name)
|
| 568 |
+
return None
|
| 569 |
+
memberships = data.get("memberships") or []
|
| 570 |
+
if not memberships:
|
| 571 |
+
return None
|
| 572 |
+
org = memberships[0].get("organization") or {}
|
| 573 |
+
org_uuid = org.get("uuid")
|
| 574 |
+
if org_uuid:
|
| 575 |
+
result = {"org_uuid": org_uuid}
|
| 576 |
+
self._site_context_cache[page_id] = (result, time.time())
|
| 577 |
+
return result
|
| 578 |
+
return None
|
| 579 |
+
|
| 580 |
+
async def create_session(
|
| 581 |
+
self,
|
| 582 |
+
context: BrowserContext,
|
| 583 |
+
page: Page,
|
| 584 |
+
site_context: dict[str, Any],
|
| 585 |
+
**kwargs: Any,
|
| 586 |
+
) -> str | None:
|
| 587 |
+
org_uuid = site_context["org_uuid"]
|
| 588 |
+
public_model = str(kwargs.get("public_model") or "").strip()
|
| 589 |
+
upstream_model = str(kwargs.get("upstream_model") or "").strip()
|
| 590 |
+
if not upstream_model:
|
| 591 |
+
upstream_model = self.resolve_model(None).upstream_model
|
| 592 |
+
payload: dict[str, Any] = {
|
| 593 |
+
"name": "",
|
| 594 |
+
"model": (
|
| 595 |
+
_base_upstream_model(public_model)
|
| 596 |
+
if _is_thinking_model(public_model)
|
| 597 |
+
else upstream_model
|
| 598 |
+
),
|
| 599 |
+
}
|
| 600 |
+
if _is_thinking_model(public_model):
|
| 601 |
+
payload["paprika_mode"] = "extended"
|
| 602 |
+
url = f"{self.api_base}/organizations/{org_uuid}/chat_conversations"
|
| 603 |
+
request_id = str(kwargs.get("request_id") or "").strip()
|
| 604 |
+
resp = await request_json_via_context_request(
|
| 605 |
+
context,
|
| 606 |
+
page,
|
| 607 |
+
url,
|
| 608 |
+
method="POST",
|
| 609 |
+
body=json.dumps(payload),
|
| 610 |
+
headers={"Content-Type": "application/json"},
|
| 611 |
+
timeout_ms=15000,
|
| 612 |
+
request_id=request_id or f"create-session:{org_uuid}",
|
| 613 |
+
)
|
| 614 |
+
status = int(resp.get("status") or 0)
|
| 615 |
+
if status not in (200, 201):
|
| 616 |
+
text = str(resp.get("text") or "")[:500]
|
| 617 |
+
logger.warning("创建会话失败 %s: %s", status, text)
|
| 618 |
+
return None
|
| 619 |
+
data = resp.get("json")
|
| 620 |
+
if not isinstance(data, dict):
|
| 621 |
+
logger.warning("创建会话返回非 JSON")
|
| 622 |
+
return None
|
| 623 |
+
return data.get("uuid")
|
| 624 |
+
|
| 625 |
+
def build_completion_url(self, session_id: str, state: dict[str, Any]) -> str:
|
| 626 |
+
org_uuid = state["site_context"]["org_uuid"]
|
| 627 |
+
return f"{self.api_base}/organizations/{org_uuid}/chat_conversations/{session_id}/completion"
|
| 628 |
+
|
| 629 |
+
# 构建请求体
|
| 630 |
+
def build_completion_body(
|
| 631 |
+
self,
|
| 632 |
+
message: str,
|
| 633 |
+
session_id: str,
|
| 634 |
+
state: dict[str, Any],
|
| 635 |
+
prepared_attachments: dict[str, Any] | None = None,
|
| 636 |
+
) -> dict[str, Any]:
|
| 637 |
+
parent = state.get("parent_message_uuid")
|
| 638 |
+
tz = state.get("timezone") or TIMEZONE
|
| 639 |
+
public_model = str(state.get("public_model") or "").strip()
|
| 640 |
+
body = _default_completion_body(
|
| 641 |
+
message,
|
| 642 |
+
is_follow_up=parent is not None,
|
| 643 |
+
timezone=tz,
|
| 644 |
+
public_model=public_model,
|
| 645 |
+
)
|
| 646 |
+
if parent:
|
| 647 |
+
body["parent_message_uuid"] = parent
|
| 648 |
+
if prepared_attachments:
|
| 649 |
+
body.update(prepared_attachments)
|
| 650 |
+
return body
|
| 651 |
+
|
| 652 |
+
def parse_stream_event(
|
| 653 |
+
self,
|
| 654 |
+
payload: str,
|
| 655 |
+
) -> tuple[list[str], str | None, str | None]:
|
| 656 |
+
return _parse_one_sse_event(payload)
|
| 657 |
+
|
| 658 |
+
    def is_stream_end_event(self, payload: str) -> bool:
        """Return True when ``payload`` is a terminal SSE event (delegates to
        ``_is_terminal_sse_event``)."""
        return _is_terminal_sse_event(payload)
|
| 660 |
+
|
| 661 |
+
# 处理错误
|
| 662 |
+
def stream_transport(self) -> str:
|
| 663 |
+
return "context_request"
|
| 664 |
+
|
| 665 |
+
def on_http_error(
|
| 666 |
+
self,
|
| 667 |
+
message: str,
|
| 668 |
+
headers: dict[str, str] | None,
|
| 669 |
+
) -> int | None:
|
| 670 |
+
if "429" not in message:
|
| 671 |
+
return None
|
| 672 |
+
if headers:
|
| 673 |
+
reset = headers.get("anthropic-ratelimit-requests-reset") or headers.get(
|
| 674 |
+
"Anthropic-Ratelimit-Requests-Reset"
|
| 675 |
+
)
|
| 676 |
+
if reset:
|
| 677 |
+
try:
|
| 678 |
+
s = str(reset).strip()
|
| 679 |
+
if s.endswith("Z"):
|
| 680 |
+
s = s[:-1] + "+00:00"
|
| 681 |
+
dt = datetime.datetime.fromisoformat(s)
|
| 682 |
+
if dt.tzinfo is None:
|
| 683 |
+
dt = dt.replace(tzinfo=datetime.timezone.utc)
|
| 684 |
+
return int(dt.timestamp())
|
| 685 |
+
except Exception:
|
| 686 |
+
pass
|
| 687 |
+
return int(time.time()) + 5 * 3600
|
| 688 |
+
|
| 689 |
+
    # Canonical 8-4-4-4-12 hex UUID. Used to pick genuine message UUIDs out of
    # the mixed IDs collected while streaming a completion.
    _UUID_RE = re.compile(
        r"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$"
    )
|
| 692 |
+
|
| 693 |
+
def on_stream_completion_finished(
|
| 694 |
+
self,
|
| 695 |
+
session_id: str,
|
| 696 |
+
message_ids: list[str],
|
| 697 |
+
) -> None:
|
| 698 |
+
"""Claude 多轮续写需要 parent_message_uuid,取本轮最后一条消息 UUID 写入 state。"""
|
| 699 |
+
last_uuid = next(
|
| 700 |
+
(m for m in reversed(message_ids) if self._UUID_RE.match(m)), None
|
| 701 |
+
)
|
| 702 |
+
if last_uuid and session_id in self._session_state:
|
| 703 |
+
self._session_state[session_id]["parent_message_uuid"] = last_uuid
|
| 704 |
+
logger.info(
|
| 705 |
+
"[%s] updated parent_message_uuid=%s", self.type_name, last_uuid
|
| 706 |
+
)
|
| 707 |
+
|
| 708 |
+
async def prepare_attachments(
|
| 709 |
+
self,
|
| 710 |
+
context: BrowserContext,
|
| 711 |
+
page: Page,
|
| 712 |
+
session_id: str,
|
| 713 |
+
state: dict[str, Any],
|
| 714 |
+
attachments: list[InputAttachment],
|
| 715 |
+
request_id: str = "",
|
| 716 |
+
) -> dict[str, Any]:
|
| 717 |
+
if not attachments:
|
| 718 |
+
return {}
|
| 719 |
+
if len(attachments) > 5:
|
| 720 |
+
raise RuntimeError("Claude 单次最多上传 5 张图片")
|
| 721 |
+
|
| 722 |
+
org_uuid = state["site_context"]["org_uuid"]
|
| 723 |
+
url = (
|
| 724 |
+
f"{self.api_base}/organizations/{org_uuid}/conversations/"
|
| 725 |
+
f"{session_id}/wiggle/upload-file"
|
| 726 |
+
)
|
| 727 |
+
file_ids: list[str] = []
|
| 728 |
+
for attachment in attachments:
|
| 729 |
+
resp = await upload_file_via_context_request(
|
| 730 |
+
context,
|
| 731 |
+
page,
|
| 732 |
+
url,
|
| 733 |
+
filename=attachment.filename,
|
| 734 |
+
mime_type=attachment.mime_type,
|
| 735 |
+
data=attachment.data,
|
| 736 |
+
field_name="file",
|
| 737 |
+
timeout_ms=30000,
|
| 738 |
+
request_id=request_id or f"upload:{session_id}",
|
| 739 |
+
)
|
| 740 |
+
status = int(resp.get("status") or 0)
|
| 741 |
+
if status not in (200, 201):
|
| 742 |
+
text = str(resp.get("text") or "")[:500]
|
| 743 |
+
raise RuntimeError(f"图片上传失败 {status}: {text}")
|
| 744 |
+
data = resp.get("json")
|
| 745 |
+
if not isinstance(data, dict):
|
| 746 |
+
raise RuntimeError("图片上传返回非 JSON")
|
| 747 |
+
file_uuid = data.get("file_uuid") or data.get("uuid")
|
| 748 |
+
if not file_uuid:
|
| 749 |
+
raise RuntimeError("图片上传未返回 file_uuid")
|
| 750 |
+
file_ids.append(str(file_uuid))
|
| 751 |
+
return {"attachments": [], "files": file_ids}
|
| 752 |
+
|
| 753 |
+
|
| 754 |
+
def register_claude_plugin() -> None:
    """Register a ClaudePlugin instance with the global PluginRegistry."""
    PluginRegistry.register(ClaudePlugin())
|
core/plugin/errors.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""插件层公共异常,独立模块避免循环导入。"""
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
class AccountFrozenError(RuntimeError):
    """
    Raised by a plugin when it detects the account is rate-limited or out of
    quota; carries the unfreeze timestamp (Unix seconds).
    chat_handler catches it, persists the freeze to the config, and retries
    with another account.
    """

    def __init__(self, message: str, unfreeze_at: int) -> None:
        super().__init__(message)
        # Unix timestamp (seconds) after which the account may be used again.
        self.unfreeze_at = unfreeze_at
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class BrowserResourceInvalidError(RuntimeError):
    """Raised when a page / tab / browser resource has become invalid, so the
    caller can perform targeted recycling and retry.

    ``resource_hint`` names what to recycle (e.g. "page" vs "browser"); the
    remaining fields are diagnostic context carried into logs.
    """

    def __init__(
        self,
        detail: str,
        *,
        helper_name: str,
        operation: str,
        stage: str,
        resource_hint: str,
        request_url: str,
        page_url: str,
        request_id: str | None = None,
        stream_phase: str | None = None,
        proxy_key: object | None = None,
        type_name: str | None = None,
        account_id: str | None = None,
    ) -> None:
        super().__init__(detail)
        # Mirror every argument as an attribute so upstream handlers and
        # structured logging can read them without re-parsing the message.
        self.detail = detail
        self.helper_name = helper_name
        self.operation = operation
        self.stage = stage
        self.resource_hint = resource_hint
        self.request_url = request_url
        self.page_url = page_url
        self.request_id = request_id
        self.stream_phase = stream_phase
        self.proxy_key = proxy_key
        self.type_name = type_name
        self.account_id = account_id
|
core/plugin/helpers.py
ADDED
|
@@ -0,0 +1,1246 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
插件通用能力:页面复用、Cookie 登录、在浏览器内发起 fetch 并流式回传。
|
| 3 |
+
接入方只需实现站点特有的 URL/请求体/SSE 解析,其余复用此处逻辑。
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import asyncio
|
| 7 |
+
import base64
|
| 8 |
+
import codecs
|
| 9 |
+
import json
|
| 10 |
+
import logging
|
| 11 |
+
from collections.abc import Callable
|
| 12 |
+
from typing import Any, AsyncIterator
|
| 13 |
+
from urllib.parse import urlparse
|
| 14 |
+
|
| 15 |
+
from curl_cffi import requests as curl_requests
|
| 16 |
+
from playwright.async_api import BrowserContext, Page
|
| 17 |
+
|
| 18 |
+
from core.plugin.errors import AccountFrozenError, BrowserResourceInvalidError
|
| 19 |
+
|
| 20 |
+
ParseSseEvent = Callable[[str], tuple[list[str], str | None, str | None]]
|
| 21 |
+
|
| 22 |
+
logger = logging.getLogger(__name__)
|
| 23 |
+
|
| 24 |
+
# Lowercased substring patterns that identify a dead browser resource.
# Each entry is (needle, resource_hint, reason): resource_hint tells the
# caller whether recycling the page or relaunching the whole browser is
# needed; reason is a stable token used in log lines.
_BROWSER_RESOURCE_ERROR_PATTERNS: tuple[tuple[str, str, str], ...] = (
    ("target crashed", "page", "target_crashed"),
    ("page crashed", "browser", "page_crashed"),
    ("execution context was destroyed", "page", "execution_context_destroyed"),
    ("navigating frame was detached", "page", "frame_detached"),
    ("frame was detached", "page", "frame_detached"),
    ("session closed. most likely the page has been closed", "page", "page_closed"),
    ("most likely the page has been closed", "page", "page_closed"),
    ("browser context has been closed", "page", "context_closed"),
    ("context has been closed", "page", "context_closed"),
    ("target page, context or browser has been closed", "page", "page_or_browser_closed"),
    ("page has been closed", "page", "page_closed"),
    ("target closed", "page", "target_closed"),
    ("browser has been closed", "browser", "browser_closed"),
    ("browser closed", "browser", "browser_closed"),
    ("connection closed", "browser", "browser_disconnected"),
    ("connection terminated", "browser", "browser_disconnected"),
    ("has been disconnected", "browser", "browser_disconnected"),
    # Proxy / network tunnel errors — retryable via browser re-launch
    ("err_tunnel_connection_failed", "browser", "proxy_tunnel_failed"),
    ("err_proxy_connection_failed", "browser", "proxy_connection_failed"),
    ("err_connection_refused", "browser", "connection_refused"),
    ("err_connection_timed_out", "browser", "connection_timed_out"),
    ("err_connection_reset", "browser", "connection_reset"),
)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def _truncate_for_log(value: str, limit: int = 240) -> str:
|
| 52 |
+
if len(value) <= limit:
|
| 53 |
+
return value
|
| 54 |
+
return value[:limit] + "..."
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def _safe_page_url(page: Page | None) -> str:
    """Read ``page.url`` without raising; '' for None or unusable pages."""
    try:
        return "" if page is None else (page.url or "")
    except Exception:
        return ""
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def _evaluate_timeout_seconds(timeout_ms: int, grace_seconds: float = 5.0) -> float:
|
| 69 |
+
return max(5.0, float(timeout_ms) / 1000.0 + grace_seconds)
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def _consume_background_task_result(task: asyncio.Task[Any]) -> None:
|
| 74 |
+
try:
|
| 75 |
+
if not task.cancelled():
|
| 76 |
+
task.exception()
|
| 77 |
+
except Exception:
|
| 78 |
+
pass
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def _classify_browser_resource_error(
    exc: Exception,
    *,
    helper_name: str,
    operation: str,
    stage: str,
    request_url: str,
    page: Page | None,
    request_id: str | None = None,
    stream_phase: str | None = None,
) -> BrowserResourceInvalidError | None:
    """Match ``exc`` against the known dead-resource patterns.

    Returns a fully-populated BrowserResourceInvalidError when the exception
    message contains one of _BROWSER_RESOURCE_ERROR_PATTERNS (case-insensitive
    substring match); returns None otherwise so the caller can handle or
    re-raise the original exception.
    """
    # Fall back to the exception class name when the message is empty.
    message = str(exc).strip() or exc.__class__.__name__
    normalized = message.lower()
    for pattern, resource_hint, reason in _BROWSER_RESOURCE_ERROR_PATTERNS:
        if pattern not in normalized:
            continue
        page_url = _safe_page_url(page)
        # One structured warning per classification; long values are truncated
        # to keep log lines bounded.
        logger.warning(
            "[browser-resource-invalid] helper=%s operation=%s stage=%s reason=%s resource=%s request_id=%s stream_phase=%s request_url=%s page.url=%s err=%s",
            helper_name,
            operation,
            stage,
            reason,
            resource_hint,
            request_id,
            stream_phase,
            _truncate_for_log(request_url),
            _truncate_for_log(page_url),
            _truncate_for_log(message, 400),
        )
        return BrowserResourceInvalidError(
            message,
            helper_name=helper_name,
            operation=operation,
            stage=stage,
            resource_hint=resource_hint,
            request_url=request_url,
            page_url=page_url,
            request_id=request_id,
            stream_phase=stream_phase,
        )
    return None
|
| 124 |
+
|
| 125 |
+
# In-page POST with streaming relay: on success the response body is sent
# chunk by chunk through the exposed binding; on failure a "__error__:"-prefixed
# message is sent. "__done__" always terminates the relay.
# bindingName is unique per request, so concurrent fetches on the same page do
# not interleave data.
# Fix: effectiveTimeoutMs must be declared OUTSIDE the try block — the catch
# block references it, and a const declared inside try is out of scope there
# (a ReferenceError would mask the real fetch error on every failure path).
PAGE_FETCH_STREAM_JS = """
async ({ url, body, bindingName, timeoutMs }) => {
  const send = globalThis[bindingName];
  const done = "__done__";
  const errPrefix = "__error__:";
  const effectiveTimeoutMs = timeoutMs || 90000;
  try {
    const ctrl = new AbortController();
    const t = setTimeout(() => ctrl.abort(), effectiveTimeoutMs);
    const resp = await fetch(url, {
      method: "POST",
      body: body,
      headers: { "Content-Type": "application/json", "Accept": "text/event-stream" },
      credentials: "include",
      signal: ctrl.signal
    });
    clearTimeout(t);
    if (!resp.ok) {
      const errText = await resp.text();
      const errSnippet = (errText && errText.length > 800) ? errText.slice(0, 800) + "..." : (errText || "");
      await send(errPrefix + "HTTP " + resp.status + " " + errSnippet);
      await send(done);
      return;
    }
    if (!resp.body) {
      await send(errPrefix + "No response body");
      await send(done);
      return;
    }
    const headersObj = {};
    resp.headers.forEach((v, k) => { headersObj[k] = v; });
    await send("__headers__:" + JSON.stringify(headersObj));
    const reader = resp.body.getReader();
    const dec = new TextDecoder();
    while (true) {
      const { done: streamDone, value } = await reader.read();
      if (streamDone) break;
      await send(dec.decode(value));
    }
  } catch (e) {
    const msg = e.name === "AbortError" ? `请求超时(${Math.floor(effectiveTimeoutMs / 1000)}s)` : (e.message || String(e));
    await send(errPrefix + msg);
  }
  await send(done);
}
"""
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
PAGE_FETCH_JSON_JS = """
|
| 176 |
+
async ({ url, method, body, headers, timeoutMs }) => {
|
| 177 |
+
const ctrl = new AbortController();
|
| 178 |
+
const t = setTimeout(() => ctrl.abort(), timeoutMs || 15000);
|
| 179 |
+
try {
|
| 180 |
+
const resp = await fetch(url, {
|
| 181 |
+
method: method || "GET",
|
| 182 |
+
body: body ?? undefined,
|
| 183 |
+
headers: headers || {},
|
| 184 |
+
credentials: "include",
|
| 185 |
+
signal: ctrl.signal
|
| 186 |
+
});
|
| 187 |
+
clearTimeout(t);
|
| 188 |
+
const text = await resp.text();
|
| 189 |
+
const headersObj = {};
|
| 190 |
+
resp.headers.forEach((v, k) => { headersObj[k] = v; });
|
| 191 |
+
return {
|
| 192 |
+
ok: resp.ok,
|
| 193 |
+
status: resp.status,
|
| 194 |
+
statusText: resp.statusText,
|
| 195 |
+
url: resp.url,
|
| 196 |
+
redirected: resp.redirected,
|
| 197 |
+
headers: headersObj,
|
| 198 |
+
text,
|
| 199 |
+
};
|
| 200 |
+
} catch (e) {
|
| 201 |
+
clearTimeout(t);
|
| 202 |
+
const msg = e.name === "AbortError" ? `请求超时(${Math.floor((timeoutMs || 15000) / 1000)}s)` : (e.message || String(e));
|
| 203 |
+
return { error: msg };
|
| 204 |
+
}
|
| 205 |
+
}
|
| 206 |
+
"""
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
PAGE_FETCH_MULTIPART_JS = """
|
| 210 |
+
async ({ url, filename, mimeType, dataBase64, fieldName, extraFields, timeoutMs }) => {
|
| 211 |
+
const ctrl = new AbortController();
|
| 212 |
+
const t = setTimeout(() => ctrl.abort(), timeoutMs || 30000);
|
| 213 |
+
try {
|
| 214 |
+
const binary = atob(dataBase64);
|
| 215 |
+
const bytes = new Uint8Array(binary.length);
|
| 216 |
+
for (let i = 0; i < binary.length; i += 1) {
|
| 217 |
+
bytes[i] = binary.charCodeAt(i);
|
| 218 |
+
}
|
| 219 |
+
const form = new FormData();
|
| 220 |
+
if (extraFields) {
|
| 221 |
+
Object.entries(extraFields).forEach(([k, v]) => {
|
| 222 |
+
if (v !== undefined && v !== null) form.append(k, String(v));
|
| 223 |
+
});
|
| 224 |
+
}
|
| 225 |
+
const file = new File([bytes], filename, { type: mimeType || "application/octet-stream" });
|
| 226 |
+
form.append(fieldName || "file", file);
|
| 227 |
+
const resp = await fetch(url, {
|
| 228 |
+
method: "POST",
|
| 229 |
+
body: form,
|
| 230 |
+
credentials: "include",
|
| 231 |
+
signal: ctrl.signal
|
| 232 |
+
});
|
| 233 |
+
clearTimeout(t);
|
| 234 |
+
const text = await resp.text();
|
| 235 |
+
const headersObj = {};
|
| 236 |
+
resp.headers.forEach((v, k) => { headersObj[k] = v; });
|
| 237 |
+
return {
|
| 238 |
+
ok: resp.ok,
|
| 239 |
+
status: resp.status,
|
| 240 |
+
statusText: resp.statusText,
|
| 241 |
+
url: resp.url,
|
| 242 |
+
redirected: resp.redirected,
|
| 243 |
+
headers: headersObj,
|
| 244 |
+
text,
|
| 245 |
+
};
|
| 246 |
+
} catch (e) {
|
| 247 |
+
clearTimeout(t);
|
| 248 |
+
const msg = e.name === "AbortError" ? `请求超时(${Math.floor((timeoutMs || 30000) / 1000)}s)` : (e.message || String(e));
|
| 249 |
+
return { error: msg };
|
| 250 |
+
}
|
| 251 |
+
}
|
| 252 |
+
"""
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
async def ensure_page_for_site(
    context: BrowserContext,
    url_contains: str,
    start_url: str,
    *,
    timeout: int = 45000,
) -> Page:
    """Reuse an open page whose URL contains ``url_contains``; otherwise open one.

    Callers only supply the site fingerprint and an entry URL. A fresh page
    navigates with ``wait_until="commit"``, so the coroutine returns as soon
    as navigation is committed rather than after full load.
    """
    for candidate in context.pages or []:
        if url_contains in (candidate.url or ""):
            return candidate
    fresh = await context.new_page()
    await fresh.goto(start_url, wait_until="commit", timeout=timeout)
    return fresh
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
async def create_page_for_site(
    context: BrowserContext,
    start_url: str,
    *,
    reuse_page: Page | None = None,
    timeout: int = 45000,
) -> Page:
    """Navigate ``reuse_page`` (or a newly created page) to ``start_url``.

    Used to initialise / replenish the page pool, and to repurpose the
    browser's default blank page instead of leaking it.
    """
    page = reuse_page if reuse_page is not None else await context.new_page()
    await page.goto(start_url, wait_until="commit", timeout=timeout)
    return page
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
def _cookie_domain_matches(cookie_domain: str, site_domain: str) -> bool:
|
| 295 |
+
"""判断 cookie 的 domain 是否属于站点 domain(如 .claude.ai 与 claude.ai 视为同一域)。"""
|
| 296 |
+
a = cookie_domain if cookie_domain.startswith(".") else f".{cookie_domain}"
|
| 297 |
+
b = site_domain if site_domain.startswith(".") else f".{site_domain}"
|
| 298 |
+
return a == b
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
def _cookie_to_set_param(c: Any) -> dict[str, str]:
|
| 302 |
+
"""将 context.cookies() 返回的项转为 add_cookies 接受的 SetCookieParam 格式。"""
|
| 303 |
+
return {
|
| 304 |
+
"name": c["name"],
|
| 305 |
+
"value": c["value"],
|
| 306 |
+
"domain": c.get("domain") or "",
|
| 307 |
+
"path": c.get("path") or "/",
|
| 308 |
+
}
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
async def clear_cookies_for_domain(
    context: BrowserContext,
    site_domain: str,
) -> None:
    """Drop every cookie belonging to ``site_domain`` while preserving all
    cookies from other domains in the context."""
    survivors = []
    for cookie in await context.cookies():
        if not _cookie_domain_matches(cookie.get("domain", ""), site_domain):
            survivors.append(cookie)
    # Playwright has no per-domain delete, so clear everything and re-add.
    await context.clear_cookies()
    if survivors:
        params = [_cookie_to_set_param(c) for c in survivors]
        await context.add_cookies(params)  # type: ignore[arg-type]
    logger.info(
        "[auth] cleared cookies for domain=%s (kept %s cookies)",
        site_domain,
        len(survivors),
    )
|
| 328 |
+
|
| 329 |
+
|
| 330 |
+
async def clear_page_storage_for_switch(page: Page) -> None:
    """Wipe localStorage on the page's current origin before switching accounts.

    Best-effort: a detached/closed page only produces a warning.
    """
    try:
        await page.evaluate("() => { window.localStorage.clear(); }")
        logger.info("[auth] cleared localStorage for switch")
    except Exception as exc:
        logger.warning("[auth] clear localStorage failed (page may be detached): %s", exc)
| 338 |
+
|
| 339 |
+
async def safe_page_reload(page: Page, url: str | None = None) -> None:
    """Reload the page, or navigate to ``url`` when given, swallowing the
    benign ERR_ABORTED / frame-detached failures; anything else re-raises."""
    try:
        if url:
            await page.goto(url, wait_until="commit", timeout=45000)
        else:
            await page.reload(wait_until="domcontentloaded", timeout=45000)
    except Exception as exc:
        text = str(exc)
        benign = "ERR_ABORTED" in text or "detached" in text.lower()
        if not benign:
            raise
        logger.warning(
            "[auth] page.reload/goto 被中止或 frame 已分离: %s", text[:200]
        )
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
async def apply_cookie_auth(
    context: BrowserContext,
    page: Page,
    auth: dict[str, Any],
    cookie_name: str,
    auth_keys: list[str],
    domain: str,
    *,
    path: str = "/",
    reload: bool = True,
) -> None:
    """
    Take the first non-empty value from ``auth`` (tried in ``auth_keys``
    order) as the cookie value, write it into the browser context, and
    optionally reload the page.

    Callers only need to supply the cookie name, the candidate keys inside
    ``auth``, and the cookie domain.

    Writing the cookie without reloading is enough for ``fetch()`` calls in
    the same context to carry it; reload only when the page *document* itself
    must observe the new login state.

    Raises ValueError when none of ``auth_keys`` yields a non-empty value.
    """
    # First key whose value is present and non-empty wins.
    value = None
    for k in auth_keys:
        v = auth.get(k)
        if v is not None and v != "":
            value = str(v).strip()
        if value:
            break
    if not value:
        raise ValueError(f"auth 需包含以下其一且非空: {auth_keys}")

    logger.info(
        "[auth] context.add_cookies domain=%s name=%s reload=%s page.url=%s",
        domain,
        cookie_name,
        reload,
        page.url,
    )
    # The cookie is written secure + httpOnly on the given domain/path.
    await context.add_cookies(
        [
            {
                "name": cookie_name,
                "value": value,
                "domain": domain,
                "path": path,
                "secure": True,
                "httpOnly": True,
            }
        ]
    )
    if reload:
        await safe_page_reload(page)
|
| 403 |
+
|
| 404 |
+
|
| 405 |
+
def _attach_json_body(result: dict[str, Any], *, invalid_message: str) -> dict[str, Any]:
|
| 406 |
+
if not isinstance(result, dict):
|
| 407 |
+
raise RuntimeError(invalid_message)
|
| 408 |
+
error = result.get("error")
|
| 409 |
+
if error:
|
| 410 |
+
raise RuntimeError(str(error))
|
| 411 |
+
text = result.get("text")
|
| 412 |
+
if isinstance(text, str) and text:
|
| 413 |
+
try:
|
| 414 |
+
result["json"] = json.loads(text)
|
| 415 |
+
except json.JSONDecodeError:
|
| 416 |
+
result["json"] = None
|
| 417 |
+
else:
|
| 418 |
+
result["json"] = None
|
| 419 |
+
return result
|
| 420 |
+
|
| 421 |
+
|
| 422 |
+
def _cookie_domain_matches_url(cookie_domain: str, target_url: str) -> bool:
|
| 423 |
+
host = (urlparse(target_url).hostname or "").lower().lstrip(".")
|
| 424 |
+
domain = (cookie_domain or "").lower().lstrip(".")
|
| 425 |
+
if not host or not domain:
|
| 426 |
+
return False
|
| 427 |
+
return host == domain or host.endswith(f".{domain}")
|
| 428 |
+
|
| 429 |
+
|
| 430 |
+
def _cookies_for_url(cookies: list[dict[str, Any]], target_url: str) -> dict[str, str]:
    """Pick the name→value cookie pairs whose domain covers ``target_url``.

    Cookies with a blank name or a non-matching domain are skipped; when the
    URL has no parseable host the result is empty.
    """
    if not (urlparse(target_url).hostname or ""):
        return {}
    jar: dict[str, str] = {}
    for cookie in cookies:
        name = str(cookie.get("name") or "").strip()
        if not name:
            continue
        domain = str(cookie.get("domain") or "").strip()
        if _cookie_domain_matches_url(domain, target_url):
            jar[name] = str(cookie.get("value") or "")
    return jar
|
| 443 |
+
|
| 444 |
+
|
| 445 |
+
async def _stream_via_http_client(
|
| 446 |
+
context: BrowserContext,
|
| 447 |
+
page: Page | None,
|
| 448 |
+
url: str,
|
| 449 |
+
body: str,
|
| 450 |
+
request_id: str,
|
| 451 |
+
*,
|
| 452 |
+
on_http_error: Callable[[str, dict[str, str] | None], int | None] | None = None,
|
| 453 |
+
on_headers: Callable[[dict[str, str]], None] | None = None,
|
| 454 |
+
connect_timeout: float = 30.0,
|
| 455 |
+
read_timeout: float = 300.0,
|
| 456 |
+
impersonate: str = "chrome142",
|
| 457 |
+
proxy_url: str | None = None,
|
| 458 |
+
proxy_auth: tuple[str, str] | None = None,
|
| 459 |
+
) -> AsyncIterator[str]:
|
| 460 |
+
logger.info(
|
| 461 |
+
"[fetch] helper=stream_raw_via_context_request request_id=%s stage=http_client url=%s page.url=%s",
|
| 462 |
+
request_id,
|
| 463 |
+
_truncate_for_log(url, 120),
|
| 464 |
+
_truncate_for_log(_safe_page_url(page), 120),
|
| 465 |
+
)
|
| 466 |
+
|
| 467 |
+
parsed = urlparse(url)
|
| 468 |
+
referer = ""
|
| 469 |
+
if parsed.scheme and parsed.netloc:
|
| 470 |
+
referer = f"{parsed.scheme}://{parsed.netloc}/"
|
| 471 |
+
|
| 472 |
+
try:
|
| 473 |
+
cookies = await context.cookies([url])
|
| 474 |
+
except Exception as e:
|
| 475 |
+
classified = _classify_browser_resource_error(
|
| 476 |
+
e,
|
| 477 |
+
helper_name="stream_raw_via_context_request",
|
| 478 |
+
operation="context.cookies",
|
| 479 |
+
stage="load_cookies",
|
| 480 |
+
request_url=url,
|
| 481 |
+
page=page,
|
| 482 |
+
request_id=request_id,
|
| 483 |
+
stream_phase="fetch",
|
| 484 |
+
)
|
| 485 |
+
if classified is not None:
|
| 486 |
+
raise classified from e
|
| 487 |
+
raise BrowserResourceInvalidError(
|
| 488 |
+
str(e),
|
| 489 |
+
helper_name="stream_raw_via_context_request",
|
| 490 |
+
operation="context.cookies",
|
| 491 |
+
stage="load_cookies",
|
| 492 |
+
resource_hint="page",
|
| 493 |
+
request_url=url,
|
| 494 |
+
page_url=_safe_page_url(page),
|
| 495 |
+
request_id=request_id,
|
| 496 |
+
stream_phase="fetch",
|
| 497 |
+
) from e
|
| 498 |
+
cookie_jar = _cookies_for_url(cookies, url)
|
| 499 |
+
session_kwargs: dict[str, Any] = {
|
| 500 |
+
"impersonate": impersonate,
|
| 501 |
+
"timeout": (connect_timeout, read_timeout),
|
| 502 |
+
"verify": True,
|
| 503 |
+
"allow_redirects": True,
|
| 504 |
+
"default_headers": True,
|
| 505 |
+
}
|
| 506 |
+
if cookie_jar:
|
| 507 |
+
session_kwargs["cookies"] = cookie_jar
|
| 508 |
+
if proxy_url:
|
| 509 |
+
session_kwargs["proxy"] = proxy_url
|
| 510 |
+
if proxy_auth:
|
| 511 |
+
session_kwargs["proxy_auth"] = proxy_auth
|
| 512 |
+
|
| 513 |
+
response = None
|
| 514 |
+
try:
|
| 515 |
+
async with curl_requests.AsyncSession(**session_kwargs) as session:
|
| 516 |
+
try:
|
| 517 |
+
request_headers = {
|
| 518 |
+
"Content-Type": "application/json",
|
| 519 |
+
"Accept": "text/event-stream",
|
| 520 |
+
}
|
| 521 |
+
if referer:
|
| 522 |
+
request_headers["Origin"] = referer.rstrip("/")
|
| 523 |
+
async with session.stream(
|
| 524 |
+
"POST",
|
| 525 |
+
url,
|
| 526 |
+
data=body.encode("utf-8"),
|
| 527 |
+
headers=request_headers,
|
| 528 |
+
) as response:
|
| 529 |
+
headers = {
|
| 530 |
+
str(k).lower(): str(v) for k, v in response.headers.items()
|
| 531 |
+
}
|
| 532 |
+
if on_headers:
|
| 533 |
+
on_headers(headers)
|
| 534 |
+
|
| 535 |
+
status = int(response.status_code)
|
| 536 |
+
if status < 200 or status >= 300:
|
| 537 |
+
body_parts: list[str] = []
|
| 538 |
+
decoder = codecs.getincrementaldecoder("utf-8")("replace")
|
| 539 |
+
async for chunk in response.aiter_content():
|
| 540 |
+
if not chunk:
|
| 541 |
+
continue
|
| 542 |
+
body_parts.append(decoder.decode(chunk))
|
| 543 |
+
if sum(len(part) for part in body_parts) >= 800:
|
| 544 |
+
break
|
| 545 |
+
body_parts.append(decoder.decode(b"", final=True))
|
| 546 |
+
snippet = "".join(body_parts)
|
| 547 |
+
if len(snippet) > 800:
|
| 548 |
+
snippet = snippet[:800] + "..."
|
| 549 |
+
msg = f"HTTP {status} {snippet}".strip()
|
| 550 |
+
if on_http_error:
|
| 551 |
+
unfreeze_at = on_http_error(msg, headers)
|
| 552 |
+
if isinstance(unfreeze_at, int):
|
| 553 |
+
logger.warning("[fetch] HTTP error from context request: %s", msg)
|
| 554 |
+
raise AccountFrozenError(msg, unfreeze_at)
|
| 555 |
+
raise RuntimeError(msg)
|
| 556 |
+
|
| 557 |
+
decoder = codecs.getincrementaldecoder("utf-8")("replace")
|
| 558 |
+
async for chunk in response.aiter_content():
|
| 559 |
+
if not chunk:
|
| 560 |
+
continue
|
| 561 |
+
text = decoder.decode(chunk)
|
| 562 |
+
if text:
|
| 563 |
+
yield text
|
| 564 |
+
tail = decoder.decode(b"", final=True)
|
| 565 |
+
if tail:
|
| 566 |
+
yield tail
|
| 567 |
+
except Exception as e:
|
| 568 |
+
classified = _classify_browser_resource_error(
|
| 569 |
+
e,
|
| 570 |
+
helper_name="stream_raw_via_context_request",
|
| 571 |
+
operation="http_client",
|
| 572 |
+
stage="stream",
|
| 573 |
+
request_url=url,
|
| 574 |
+
page=page,
|
| 575 |
+
request_id=request_id,
|
| 576 |
+
stream_phase="body",
|
| 577 |
+
)
|
| 578 |
+
if classified is not None:
|
| 579 |
+
raise classified from e
|
| 580 |
+
raise BrowserResourceInvalidError(
|
| 581 |
+
str(e),
|
| 582 |
+
helper_name="stream_raw_via_context_request",
|
| 583 |
+
operation="http_client",
|
| 584 |
+
stage="stream",
|
| 585 |
+
resource_hint="transport",
|
| 586 |
+
request_url=url,
|
| 587 |
+
page_url=_safe_page_url(page),
|
| 588 |
+
request_id=request_id,
|
| 589 |
+
stream_phase="body",
|
| 590 |
+
) from e
|
| 591 |
+
except AccountFrozenError:
|
| 592 |
+
raise
|
| 593 |
+
except BrowserResourceInvalidError:
|
| 594 |
+
raise
|
| 595 |
+
except Exception as e:
|
| 596 |
+
classified = _classify_browser_resource_error(
|
| 597 |
+
e,
|
| 598 |
+
helper_name="stream_raw_via_context_request",
|
| 599 |
+
operation="http_client",
|
| 600 |
+
stage="request",
|
| 601 |
+
request_url=url,
|
| 602 |
+
page=page,
|
| 603 |
+
request_id=request_id,
|
| 604 |
+
stream_phase="fetch",
|
| 605 |
+
)
|
| 606 |
+
if classified is not None:
|
| 607 |
+
raise classified from e
|
| 608 |
+
logger.warning(
|
| 609 |
+
"[fetch] helper=stream_raw_via_context_request request_id=%s http_client failed url=%s page.url=%s err=%s",
|
| 610 |
+
request_id,
|
| 611 |
+
_truncate_for_log(url, 120),
|
| 612 |
+
_truncate_for_log(_safe_page_url(page), 120),
|
| 613 |
+
_truncate_for_log(str(e), 400),
|
| 614 |
+
)
|
| 615 |
+
raise BrowserResourceInvalidError(
|
| 616 |
+
str(e),
|
| 617 |
+
helper_name="stream_raw_via_context_request",
|
| 618 |
+
operation="http_client",
|
| 619 |
+
stage="request",
|
| 620 |
+
resource_hint="transport",
|
| 621 |
+
request_url=url,
|
| 622 |
+
page_url=_safe_page_url(page),
|
| 623 |
+
request_id=request_id,
|
| 624 |
+
stream_phase="fetch",
|
| 625 |
+
) from e
|
| 626 |
+
|
| 627 |
+
|
| 628 |
+
async def _request_via_context_request(
    context: BrowserContext,
    page: Page | None,
    url: str,
    *,
    method: str = "GET",
    body: str | None = None,
    headers: dict[str, str] | None = None,
    multipart: dict[str, Any] | None = None,
    timeout_ms: int = 15000,
    request_id: str | None = None,
    helper_name: str,
) -> dict[str, Any]:
    """Perform one non-streaming request through Playwright's ``context.request``.

    Returns a plain dict describing the response: ``ok``, ``status``,
    ``statusText``, final ``url``, ``redirected`` flag, lower-level ``headers``
    map, and the body ``text``. Failures recognized by
    ``_classify_browser_resource_error`` are re-raised as
    ``BrowserResourceInvalidError``; anything else becomes ``RuntimeError``.
    The APIResponse is always disposed.
    """
    logger.info(
        "[fetch] helper=%s method=%s request_id=%s url=%s page.url=%s",
        helper_name,
        method,
        request_id,
        _truncate_for_log(url, 120),
        _truncate_for_log(_safe_page_url(page), 120),
    )
    resp = None
    try:
        resp = await context.request.fetch(
            url,
            method=method,
            headers=headers or None,
            data=body,
            multipart=multipart,
            timeout=timeout_ms,
            fail_on_status_code=False,
        )
        final_url = str(resp.url)
        payload_text = await resp.text()
        header_map = {str(k): str(v) for k, v in resp.headers.items()}
        return {
            "ok": bool(resp.ok),
            "status": int(resp.status),
            "statusText": str(resp.status_text),
            "url": final_url,
            # Playwright follows redirects; compare final vs requested URL.
            "redirected": final_url != url,
            "headers": header_map,
            "text": payload_text,
        }
    except Exception as exc:
        mapped = _classify_browser_resource_error(
            exc,
            helper_name=helper_name,
            operation="context.request",
            stage="fetch",
            request_url=url,
            page=page,
            request_id=request_id,
        )
        if mapped is not None:
            raise mapped from exc
        logger.warning(
            "[fetch] helper=%s request_id=%s context.request failed url=%s page.url=%s err=%s",
            helper_name,
            request_id,
            _truncate_for_log(url, 120),
            _truncate_for_log(_safe_page_url(page), 120),
            _truncate_for_log(str(exc), 400),
        )
        raise RuntimeError(str(exc)) from exc
    finally:
        # Release the APIResponse regardless of success or failure.
        if resp is not None:
            try:
                await resp.dispose()
            except Exception:
                pass
|
| 697 |
+
|
| 698 |
+
|
| 699 |
+
async def request_json_via_context_request(
    context: BrowserContext,
    page: Page | None,
    url: str,
    *,
    method: str = "GET",
    body: str | None = None,
    headers: dict[str, str] | None = None,
    timeout_ms: int = 15000,
    request_id: str | None = None,
) -> dict[str, Any]:
    """JSON-oriented wrapper over ``_request_via_context_request``.

    Forwards all arguments unchanged and attaches a parsed JSON body
    (via ``_attach_json_body``) to the raw response dict.
    """
    raw = await _request_via_context_request(
        context,
        page,
        url,
        method=method,
        body=body,
        headers=headers,
        timeout_ms=timeout_ms,
        request_id=request_id,
        helper_name="request_json_via_context_request",
    )
    return _attach_json_body(raw, invalid_message="控制请求返回结果异常")
|
| 722 |
+
|
| 723 |
+
|
| 724 |
+
async def request_json_via_page_fetch(
    page: Page,
    url: str,
    *,
    method: str = "GET",
    body: str | None = None,
    headers: dict[str, str] | None = None,
    timeout_ms: int = 15000,
    request_id: str | None = None,
) -> dict[str, Any]:
    """
    Issue a non-streaming fetch inside the page; parse the result JSON-first.

    Running the request in-page reuses the browser's real network stack,
    cookies, and proxy-extension capabilities.

    Raises:
        BrowserResourceInvalidError: when ``page.evaluate`` exceeds the derived
            timeout, or when the underlying failure is classified as an invalid
            browser resource; unclassified failures propagate unchanged.
    """
    logger.info(
        "[fetch] helper=request_json_via_page_fetch method=%s request_id=%s url=%s page.url=%s",
        method,
        request_id,
        _truncate_for_log(url, 120),
        _truncate_for_log(_safe_page_url(page), 120),
    )
    try:
        # Wrap evaluate in wait_for so a wedged page cannot block indefinitely
        # beyond the timeout derived from timeout_ms.
        result = await asyncio.wait_for(
            page.evaluate(
                PAGE_FETCH_JSON_JS,
                {
                    "url": url,
                    "method": method,
                    "body": body,
                    "headers": headers or {},
                    "timeoutMs": timeout_ms,
                },
            ),
            timeout=_evaluate_timeout_seconds(timeout_ms),
        )
    except asyncio.TimeoutError as e:
        logger.warning(
            "[fetch] helper=request_json_via_page_fetch request_id=%s evaluate timeout url=%s page.url=%s",
            request_id,
            _truncate_for_log(url, 120),
            _truncate_for_log(_safe_page_url(page), 120),
        )
        # A timed-out evaluate is treated as a broken page resource so callers
        # can recycle the page.
        raise BrowserResourceInvalidError(
            f"page.evaluate timeout after {_evaluate_timeout_seconds(timeout_ms):.1f}s",
            helper_name="request_json_via_page_fetch",
            operation="page.evaluate",
            stage="evaluate_timeout",
            resource_hint="page",
            request_url=url,
            page_url=_safe_page_url(page),
            request_id=request_id,
        ) from e
    except Exception as e:
        # Map recognized browser-resource failures; otherwise re-raise as-is.
        classified = _classify_browser_resource_error(
            e,
            helper_name="request_json_via_page_fetch",
            operation="page.evaluate",
            stage="evaluate",
            request_url=url,
            page=page,
            request_id=request_id,
        )
        if classified is not None:
            raise classified from e
        raise
    return _attach_json_body(result, invalid_message="页面 fetch 返回结果异常")
|
| 790 |
+
|
| 791 |
+
|
| 792 |
+
async def upload_file_via_context_request(
    context: BrowserContext,
    page: Page | None,
    url: str,
    *,
    filename: str,
    mime_type: str,
    data: bytes,
    field_name: str = "file",
    extra_fields: dict[str, str] | None = None,
    timeout_ms: int = 30000,
    request_id: str | None = None,
) -> dict[str, Any]:
    """Upload a single file as multipart/form-data through ``context.request``.

    Any ``extra_fields`` are included as plain form fields; the file itself is
    sent under ``field_name``. The response dict gets a JSON body attached.
    """
    file_part = {
        "name": filename,
        # Fall back to a generic binary type when no MIME type was given.
        "mimeType": mime_type or "application/octet-stream",
        "buffer": data,
    }
    form: dict[str, Any] = {**(extra_fields or {}), field_name: file_part}
    raw = await _request_via_context_request(
        context,
        page,
        url,
        method="POST",
        multipart=form,
        timeout_ms=timeout_ms,
        request_id=request_id,
        helper_name="upload_file_via_context_request",
    )
    return _attach_json_body(raw, invalid_message="控制上传返回结果异常")
|
| 822 |
+
|
| 823 |
+
|
| 824 |
+
async def upload_file_via_page_fetch(
    page: Page,
    url: str,
    *,
    filename: str,
    mime_type: str,
    data: bytes,
    field_name: str = "file",
    extra_fields: dict[str, str] | None = None,
    timeout_ms: int = 30000,
    request_id: str | None = None,
) -> dict[str, Any]:
    """
    Upload a file via an in-page multipart fetch and return the parsed result.

    The raw bytes are base64-encoded and handed to the in-page script, which
    rebuilds the multipart form inside the browser (reusing its cookies and
    network stack).

    Raises:
        BrowserResourceInvalidError: on evaluate timeout, or when the failure
            is classified as an invalid browser resource; other exceptions
            propagate unchanged.
    """
    logger.info(
        "[fetch] helper=upload_file_via_page_fetch filename=%s mime=%s request_id=%s url=%s page.url=%s",
        filename,
        mime_type,
        request_id,
        _truncate_for_log(url, 120),
        _truncate_for_log(_safe_page_url(page), 120),
    )
    try:
        # Guard the evaluate with wait_for so a hung page cannot block forever.
        result = await asyncio.wait_for(
            page.evaluate(
                PAGE_FETCH_MULTIPART_JS,
                {
                    "url": url,
                    "filename": filename,
                    "mimeType": mime_type,
                    # Bytes cannot cross the evaluate boundary directly; ship
                    # them as base64 text.
                    "dataBase64": base64.b64encode(data).decode("ascii"),
                    "fieldName": field_name,
                    "extraFields": extra_fields or {},
                    "timeoutMs": timeout_ms,
                },
            ),
            timeout=_evaluate_timeout_seconds(timeout_ms),
        )
    except asyncio.TimeoutError as e:
        logger.warning(
            "[fetch] helper=upload_file_via_page_fetch request_id=%s evaluate timeout url=%s page.url=%s",
            request_id,
            _truncate_for_log(url, 120),
            _truncate_for_log(_safe_page_url(page), 120),
        )
        # Treat an evaluate timeout as a broken page resource.
        raise BrowserResourceInvalidError(
            f"page.evaluate timeout after {_evaluate_timeout_seconds(timeout_ms):.1f}s",
            helper_name="upload_file_via_page_fetch",
            operation="page.evaluate",
            stage="evaluate_timeout",
            resource_hint="page",
            request_url=url,
            page_url=_safe_page_url(page),
            request_id=request_id,
        ) from e
    except Exception as e:
        # Map recognized browser-resource failures; otherwise re-raise as-is.
        classified = _classify_browser_resource_error(
            e,
            helper_name="upload_file_via_page_fetch",
            operation="page.evaluate",
            stage="evaluate",
            request_url=url,
            page=page,
            request_id=request_id,
        )
        if classified is not None:
            raise classified from e
        raise
    return _attach_json_body(result, invalid_message="页面上传返回结果异常")
|
| 891 |
+
|
| 892 |
+
|
| 893 |
+
async def stream_raw_via_context_request(
    context: BrowserContext,
    page: Page | None,
    url: str,
    body: str,
    request_id: str,
    *,
    on_http_error: Callable[[str, dict[str, str] | None], int | None] | None = None,
    on_headers: Callable[[dict[str, str]], None] | None = None,
    fetch_timeout: float = 90.0,
    body_timeout: float = 300.0,
    proxy_url: str | None = None,
    proxy_auth: tuple[str, str] | None = None,
) -> AsyncIterator[str]:
    """Run the completion request through a real streaming HTTP client so the
    body is consumed incrementally rather than buffered whole first."""
    # fetch_timeout is accepted only for signature compatibility; the HTTP
    # client path does not use it.
    del fetch_timeout
    stream = _stream_via_http_client(
        context,
        page,
        url,
        body,
        request_id,
        on_http_error=on_http_error,
        on_headers=on_headers,
        read_timeout=body_timeout,
        proxy_url=proxy_url,
        proxy_auth=proxy_auth,
    )
    async for piece in stream:
        yield piece
|
| 922 |
+
|
| 923 |
+
|
| 924 |
+
async def stream_raw_via_page_fetch(
    context: BrowserContext,
    page: Page,
    url: str,
    body: str,
    request_id: str,
    *,
    on_http_error: Callable[[str, dict[str, str] | None], int | None] | None = None,
    on_headers: Callable[[dict[str, str]], None] | None = None,
    error_state: dict[str, bool] | None = None,
    fetch_timeout: int = 90,
    read_timeout: float = 60.0,
) -> AsyncIterator[str]:
    """
    POST ``body`` to ``url`` inside the browser page and stream raw string
    chunks (e.g. SSE lines) back to the caller.

    Concurrent requests on the same page stay isolated: each request gets its
    own binding ``sendChunk_<request_id>``, injected via CDP
    ``Runtime.addBinding`` and received through ``Runtime.bindingCalled``.

    Sentinel chunks emitted by the in-page script:
      * ``__headers__:<json>`` — parsed and forwarded to ``on_headers``.
      * ``__error__:<msg>``   — forwarded to ``on_http_error``; may raise.
      * ``__done__``          — terminates the stream.
    """
    chunk_queue: asyncio.Queue[str] = asyncio.Queue()
    binding_name = "sendChunk_" + request_id
    # Tracks where in the lifecycle we are, for error classification/logging.
    stream_phase = "cdp_setup"

    def on_binding_called(event: dict[str, Any]) -> None:
        # CDP event handler: forward only payloads addressed to our binding.
        name = event.get("name")
        payload = event.get("payload", "")
        if name == binding_name:
            chunk_queue.put_nowait(
                payload if isinstance(payload, str) else str(payload)
            )

    def classify_stream_error(
        exc: Exception,
        *,
        stage: str,
    ) -> BrowserResourceInvalidError | None:
        # Shared shortcut so every classification call carries the same context.
        return _classify_browser_resource_error(
            exc,
            helper_name="stream_raw_via_page_fetch",
            operation="stream",
            stage=stage,
            request_url=url,
            page=page,
            request_id=request_id,
            stream_phase=stream_phase,
        )

    cdp = None
    fetch_task: asyncio.Task[None] | None = None
    try:
        try:
            cdp = await context.new_cdp_session(page)
        except Exception as e:
            classified = classify_stream_error(e, stage="new_cdp_session")
            if classified is not None:
                raise classified from e
            raise
        cdp.on("Runtime.bindingCalled", on_binding_called)
        try:
            await cdp.send("Runtime.addBinding", {"name": binding_name})
        except Exception as e:
            classified = classify_stream_error(e, stage="add_binding")
            if classified is not None:
                raise classified from e
            raise

        logger.info(
            "[fetch] helper=stream_raw_via_page_fetch request_id=%s stage=page.evaluate url=%s page.url=%s",
            request_id,
            _truncate_for_log(url, 120),
            _truncate_for_log(_safe_page_url(page), 120),
        )

        async def run_fetch() -> None:
            # Kicks off the in-page fetch; chunks arrive via the CDP binding,
            # not via this coroutine's return value.
            nonlocal stream_phase
            try:
                stream_phase = "page_evaluate"
                await asyncio.wait_for(
                    page.evaluate(
                        PAGE_FETCH_STREAM_JS,
                        {
                            "url": url,
                            "body": body,
                            "bindingName": binding_name,
                            "timeoutMs": max(1, int(fetch_timeout * 1000)),
                        },
                    ),
                    # Slightly larger than the in-page timeout so the page's
                    # own timeout normally fires first.
                    timeout=max(float(fetch_timeout) + 5.0, 10.0),
                )
            except asyncio.TimeoutError as e:
                logger.warning(
                    "[fetch] helper=stream_raw_via_page_fetch request_id=%s stage=page.evaluate evaluate timeout url=%s page.url=%s",
                    request_id,
                    _truncate_for_log(url, 120),
                    _truncate_for_log(_safe_page_url(page), 120),
                )
                raise BrowserResourceInvalidError(
                    f"page.evaluate timeout after {max(float(fetch_timeout) + 5.0, 10.0):.1f}s",
                    helper_name="stream_raw_via_page_fetch",
                    operation="stream",
                    stage="evaluate_timeout",
                    resource_hint="page",
                    request_url=url,
                    page_url=_safe_page_url(page),
                    request_id=request_id,
                    stream_phase=stream_phase,
                ) from e
            except Exception as e:
                classified = classify_stream_error(e, stage="page.evaluate")
                if classified is not None:
                    raise classified from e
                raise

        fetch_task = asyncio.create_task(run_fetch())
        try:
            headers = None
            while True:
                # Surface failures of the background fetch promptly.
                if fetch_task.done():
                    exc = fetch_task.exception()
                    if exc is not None:
                        raise exc
                try:
                    chunk = await asyncio.wait_for(
                        chunk_queue.get(), timeout=read_timeout
                    )
                except asyncio.TimeoutError as e:
                    stream_phase = "body"
                    logger.warning(
                        "[fetch] helper=stream_raw_via_page_fetch request_id=%s stream_phase=%s read timeout url=%s page.url=%s",
                        request_id,
                        stream_phase,
                        _truncate_for_log(url, 120),
                        _truncate_for_log(_safe_page_url(page), 120),
                    )
                    raise BrowserResourceInvalidError(
                        f"stream read timeout after {read_timeout:.1f}s",
                        helper_name="stream_raw_via_page_fetch",
                        operation="stream",
                        stage="read_timeout",
                        resource_hint="page",
                        request_url=url,
                        page_url=_safe_page_url(page),
                        request_id=request_id,
                        stream_phase=stream_phase,
                    ) from e
                if chunk == "__done__":
                    break
                if chunk.startswith("__headers__:"):
                    stream_phase = "headers"
                    try:
                        # len("__headers__:") == 12
                        headers = json.loads(chunk[12:])
                        if on_headers and isinstance(headers, dict):
                            on_headers({k: str(v) for k, v in headers.items()})
                    except (json.JSONDecodeError, TypeError) as e:
                        logger.debug("[fetch] 解析 __headers__ 失败: %s", e)
                    continue
                if chunk.startswith("__error__:"):
                    # len("__error__:") == 10
                    msg = chunk[10:].strip()
                    # error_state lets the SSE layer tell us a terminal event
                    # was already seen, so a late disconnect is benign.
                    saw_terminal = bool(error_state and error_state.get("terminal"))
                    stream_phase = "terminal_event" if saw_terminal else ("body" if headers else "before_headers")
                    if on_http_error:
                        unfreeze_at = on_http_error(msg, headers)
                        # An int return means "freeze this account until then".
                        if isinstance(unfreeze_at, int):
                            logger.warning("[fetch] __error__ from page: %s", msg)
                            raise AccountFrozenError(msg, unfreeze_at)
                    classified = _classify_browser_resource_error(
                        RuntimeError(msg),
                        helper_name="stream_raw_via_page_fetch",
                        operation="page_fetch_stream",
                        stage="page_error_event",
                        request_url=url,
                        page=page,
                        request_id=request_id,
                        stream_phase=stream_phase,
                    )
                    if classified is not None:
                        raise classified
                    if saw_terminal:
                        # Stream already delivered its terminal event; treat a
                        # trailing error as an ignorable disconnect.
                        logger.info(
                            "[fetch] page fetch disconnected after terminal event request_id=%s stream_phase=%s: %s",
                            request_id,
                            stream_phase,
                            msg,
                        )
                        continue
                    logger.warning(
                        "[fetch] __error__ from page before terminal event request_id=%s stream_phase=%s: %s",
                        request_id,
                        stream_phase,
                        msg,
                    )
                    raise RuntimeError(msg)
                stream_phase = "body"
                yield chunk
        finally:
            # Wind down the background fetch without losing its exception.
            if fetch_task is not None:
                done, pending = await asyncio.wait({fetch_task}, timeout=5.0)
                if pending:
                    fetch_task.cancel()
                    fetch_task.add_done_callback(_consume_background_task_result)
                else:
                    try:
                        fetch_task.result()
                    except asyncio.CancelledError:
                        pass
                    except BrowserResourceInvalidError:
                        pass
    finally:
        # Always detach the CDP session; cap detach time so cleanup can't hang.
        if cdp is not None:
            try:
                await asyncio.wait_for(cdp.detach(), timeout=2.0)
            except asyncio.TimeoutError:
                logger.warning(
                    "[fetch] helper=stream_raw_via_page_fetch request_id=%s detach CDP session timeout page.url=%s",
                    request_id,
                    _truncate_for_log(_safe_page_url(page), 120),
                )
            except Exception as e:
                logger.debug("detach CDP session 时异常: %s", e)
|
| 1143 |
+
|
| 1144 |
+
|
| 1145 |
+
def parse_sse_to_events(buffer: str, chunk: str) -> tuple[str, list[str]]:
    """Append ``chunk`` to ``buffer`` and split out complete SSE data payloads.

    Returns ``(remaining_buffer, payloads)``: ``remaining_buffer`` is the
    trailing partial line (no newline seen yet, carried to the next call) and
    ``payloads`` are the strings following each ``data:`` prefix. Callers
    JSON-decode each payload and extract text / message_id / error themselves.

    Notes:
        * Per the SSE spec the space after ``data:`` is optional, so both
          ``data:foo`` and ``data: foo`` yield ``"foo"`` (previously the
          space-less form was silently dropped).
        * ``[DONE]`` sentinels and empty data lines are skipped.
        * ``\\r`` from CRLF-terminated lines is removed by the per-line strip.
    """
    buffer += chunk
    lines = buffer.split("\n")
    # The final element is an incomplete (possibly empty) line; keep it.
    buffer = lines[-1]
    payloads: list[str] = []
    for line in lines[:-1]:
        line = line.strip()
        if not line.startswith("data:"):
            continue
        payload = line[5:].strip()  # len("data:") == 5; strip optional space
        if payload == "[DONE]" or not payload:
            continue
        payloads.append(payload)
    return (buffer, payloads)
|
| 1163 |
+
|
| 1164 |
+
|
| 1165 |
+
async def stream_completion_via_sse(
    context: BrowserContext,
    page: Page,
    url: str,
    body: str,
    parse_event: ParseSseEvent,
    request_id: str,
    *,
    on_http_error: Callable,
    is_terminal_event: Callable[[str], bool] | None = None,
    collect_message_id: list[str] | None = None,
    first_token_timeout: float = 30.0,
    transport: str = "page_fetch",
    transport_options: dict[str, Any] | None = None,
) -> AsyncIterator[str]:
    """
    POST inside the browser to obtain a stream, split it into SSE ``data``
    events, parse each with ``parse_event(payload)``, and yield text pieces.
    ``message_id`` values are optionally appended to ``collect_message_id``.

    ``parse_event(payload)`` returns ``(texts, message_id, error)``; a
    non-empty ``error`` is logged and raised as ``RuntimeError``.
    Exceptions raised by ``parse_event`` itself are only debug-logged and
    the event is skipped.

    ``transport`` selects the raw stream source: ``"context_request"`` uses
    the real HTTP client path, anything else uses the in-page fetch path.
    """
    buffer = ""
    # Shared with the page-fetch transport so it knows whether a terminal
    # event has already been seen (late disconnects become benign).
    stream_state: dict[str, bool] = {"terminal": False}
    saw_text = False
    loop = asyncio.get_running_loop()
    started_at = loop.time()
    opts = dict(transport_options or {})
    if transport == "context_request":
        raw_stream = stream_raw_via_context_request(
            context,
            page,
            url,
            body,
            request_id,
            on_http_error=on_http_error,
            **opts,
        )
        resource_hint = "transport"
    else:
        raw_stream = stream_raw_via_page_fetch(
            context,
            page,
            url,
            body,
            request_id,
            on_http_error=on_http_error,
            error_state=stream_state,
        )
        resource_hint = "page"
    async for chunk in raw_stream:
        buffer, payloads = parse_sse_to_events(buffer, chunk)
        for payload in payloads:
            if is_terminal_event and is_terminal_event(payload):
                stream_state["terminal"] = True
            try:
                texts, message_id, error = parse_event(payload)
            except Exception as e:
                # A single malformed event is skipped, not fatal.
                logger.debug("parse_stream_event 单条解析异常: %s", e)
                continue
            if error:
                logger.warning("SSE error from upstream: %s", error)
                raise RuntimeError(error)
            if message_id and collect_message_id is not None:
                collect_message_id.append(message_id)
            for t in texts:
                saw_text = True
                yield t
        # If no text arrived within first_token_timeout (and no terminal event
        # was seen), treat the underlying resource as broken.
        if (
            not saw_text
            and not stream_state["terminal"]
            and loop.time() - started_at >= first_token_timeout
        ):
            raise BrowserResourceInvalidError(
                f"no text token received within {first_token_timeout:.1f}s",
                helper_name="stream_completion_via_sse",
                operation="parse_stream",
                stage="first_token_timeout",
                resource_hint=resource_hint,
                request_url=url,
                page_url=_safe_page_url(page),
                request_id=request_id,
                stream_phase="before_first_text",
            )
|
core/protocol/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""协议层:客户端协议适配与 Canonical 模型。"""
|
core/protocol/anthropic.py
ADDED
|
@@ -0,0 +1,461 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Anthropic 协议适配器。"""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import json
|
| 6 |
+
import time
|
| 7 |
+
import uuid as uuid_mod
|
| 8 |
+
from collections.abc import AsyncIterator
|
| 9 |
+
from typing import Any
|
| 10 |
+
|
| 11 |
+
from core.api.conv_parser import (
|
| 12 |
+
decode_latest_session_id,
|
| 13 |
+
extract_session_id_marker,
|
| 14 |
+
strip_session_id_suffix,
|
| 15 |
+
)
|
| 16 |
+
from core.api.react import format_react_final_answer_content, parse_react_output
|
| 17 |
+
from core.api.react_stream_parser import ReactStreamParser
|
| 18 |
+
from core.hub.schemas import OpenAIStreamEvent
|
| 19 |
+
from core.protocol.base import ProtocolAdapter
|
| 20 |
+
from core.protocol.schemas import (
|
| 21 |
+
CanonicalChatRequest,
|
| 22 |
+
CanonicalContentBlock,
|
| 23 |
+
CanonicalMessage,
|
| 24 |
+
CanonicalToolSpec,
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class AnthropicProtocolAdapter(ProtocolAdapter):
    """Anthropic Messages API protocol adapter.

    Parses Anthropic-style ``/v1/messages`` bodies into the internal
    ``CanonicalChatRequest`` model, and renders the internal OpenAI-style
    event stream back into Anthropic message payloads / SSE events.
    """

    protocol_name = "anthropic"

    def parse_request(
        self,
        provider: str,
        raw_body: dict[str, Any],
    ) -> CanonicalChatRequest:
        """Convert a raw Anthropic request body into canonical form.

        Session-id markers embedded in message or system text are decoded
        into ``resume_session_id`` and stripped from the forwarded text.

        Raises:
            ValueError: if ``messages`` is not a list.
        """
        messages = raw_body.get("messages") or []
        if not isinstance(messages, list):
            raise ValueError("messages 必须为数组")
        system_blocks = self._parse_content(raw_body.get("system"))
        canonical_messages: list[CanonicalMessage] = []
        resume_session_id: str | None = None
        for item in messages:
            if not isinstance(item, dict):
                # Silently skip malformed (non-dict) message entries.
                continue
            blocks = self._parse_content(item.get("content"))
            for block in blocks:
                text = block.text or ""
                decoded = decode_latest_session_id(text)
                if decoded:
                    # Later markers win: the most recent message carrying a
                    # marker determines which session is resumed.
                    resume_session_id = decoded
                    block.text = strip_session_id_suffix(text)
            canonical_messages.append(
                CanonicalMessage(
                    role=str(item.get("role") or "user"),
                    content=blocks,
                )
            )

        # The system prompt may also carry a session-id marker.
        for block in system_blocks:
            text = block.text or ""
            decoded = decode_latest_session_id(text)
            if decoded:
                resume_session_id = decoded
                block.text = strip_session_id_suffix(text)

        tools = [self._parse_tool(tool) for tool in list(raw_body.get("tools") or [])]
        stop_sequences = raw_body.get("stop_sequences") or []
        return CanonicalChatRequest(
            protocol="anthropic",
            provider=provider,
            model=str(raw_body.get("model") or ""),
            system=system_blocks,
            messages=canonical_messages,
            stream=bool(raw_body.get("stream") or False),
            max_tokens=raw_body.get("max_tokens"),
            temperature=raw_body.get("temperature"),
            top_p=raw_body.get("top_p"),
            # Non-string stop sequences are dropped rather than coerced.
            stop_sequences=[str(v) for v in stop_sequences if isinstance(v, str)],
            tools=tools,
            tool_choice=raw_body.get("tool_choice"),
            resume_session_id=resume_session_id,
        )

    def render_non_stream(
        self,
        req: CanonicalChatRequest,
        raw_events: list[OpenAIStreamEvent],
    ) -> dict[str, Any]:
        """Render collected stream events as a single Anthropic message.

        When tools are present the accumulated text is parsed as ReAct
        output; a detected tool call becomes a ``tool_use`` block with
        stop_reason ``tool_use``. Otherwise the text (plus any session-id
        marker) is returned as a single text block with ``end_turn``.
        """
        full = "".join(
            ev.content or ""
            for ev in raw_events
            if ev.type == "content_delta" and ev.content
        )
        session_marker = extract_session_id_marker(full)
        text = strip_session_id_suffix(full)
        message_id = self._message_id(req)
        if req.tools:
            parsed = parse_react_output(text)
            if parsed and parsed.get("type") == "tool_call":
                content: list[dict[str, Any]] = [
                    {
                        "type": "tool_use",
                        "id": f"toolu_{uuid_mod.uuid4().hex[:24]}",
                        "name": str(parsed.get("tool") or ""),
                        "input": parsed.get("params") or {},
                    }
                ]
                if session_marker:
                    # Keep the marker visible to the client as a trailing
                    # text block so the next request can resume the session.
                    content.append({"type": "text", "text": session_marker})
                return self._message_response(
                    req,
                    message_id,
                    content,
                    stop_reason="tool_use",
                )
            rendered = format_react_final_answer_content(text)
        else:
            rendered = text
        if session_marker:
            rendered += session_marker
        return self._message_response(
            req,
            message_id,
            [{"type": "text", "text": rendered}],
            stop_reason="end_turn",
        )

    async def render_stream(
        self,
        req: CanonicalChatRequest,
        raw_stream: AsyncIterator[OpenAIStreamEvent],
    ) -> AsyncIterator[str]:
        """Render the internal event stream as Anthropic SSE events.

        Pipeline: raw deltas -> ReactStreamParser (OpenAI-chunk SSE) ->
        _AnthropicStreamTranslator (Anthropic SSE). A chunk that is purely
        a session-id marker is withheld and re-emitted during ``finish()``.
        """
        message_id = self._message_id(req)
        parser = ReactStreamParser(
            chat_id=f"chatcmpl-{uuid_mod.uuid4().hex[:24]}",
            model=req.model,
            created=int(time.time()),
            has_tools=bool(req.tools),
        )
        session_marker = ""
        translator = _AnthropicStreamTranslator(req, message_id)
        async for event in raw_stream:
            if event.type == "content_delta" and event.content:
                chunk = event.content
                # A chunk that strips to nothing but contains a marker is a
                # pure session-id marker: hold it back until the end.
                if extract_session_id_marker(chunk) and not strip_session_id_suffix(
                    chunk
                ):
                    session_marker = chunk
                    continue
                for sse in parser.feed(chunk):
                    for out in translator.feed_openai_sse(sse):
                        yield out
            elif event.type == "finish":
                break
        for sse in parser.finish():
            for out in translator.feed_openai_sse(sse, session_marker=session_marker):
                yield out

    def render_error(self, exc: Exception) -> tuple[int, dict[str, Any]]:
        """Map an exception to an Anthropic-shaped error payload.

        ValueError -> 400 invalid_request_error; anything else -> 500 api_error.
        """
        status = 400 if isinstance(exc, ValueError) else 500
        err_type = "invalid_request_error" if status == 400 else "api_error"
        return (
            status,
            {
                "type": "error",
                "error": {"type": err_type, "message": str(exc)},
            },
        )

    @staticmethod
    def _parse_tool(tool: dict[str, Any]) -> CanonicalToolSpec:
        """Convert one Anthropic tool definition to a CanonicalToolSpec."""
        return CanonicalToolSpec(
            name=str(tool.get("name") or ""),
            description=str(tool.get("description") or ""),
            input_schema=tool.get("input_schema") or {},
        )

    @staticmethod
    def _parse_content(value: Any) -> list[CanonicalContentBlock]:
        """Normalize Anthropic ``content`` into canonical content blocks.

        Accepts None, a plain string, or a list of strings / dict blocks.
        Handles ``text``, base64 ``image`` and ``tool_result`` block types;
        other dict block types are silently dropped.
        NOTE(review): assistant ``tool_use`` blocks are not handled here —
        confirm whether history replay needs them.

        Raises:
            ValueError: for any other content shape.
        """
        if value is None:
            return []
        if isinstance(value, str):
            return [CanonicalContentBlock(type="text", text=value)]
        if isinstance(value, list):
            blocks: list[CanonicalContentBlock] = []
            for item in value:
                if isinstance(item, str):
                    blocks.append(CanonicalContentBlock(type="text", text=item))
                    continue
                if not isinstance(item, dict):
                    continue
                item_type = str(item.get("type") or "")
                if item_type == "text":
                    blocks.append(
                        CanonicalContentBlock(
                            type="text", text=str(item.get("text") or "")
                        )
                    )
                elif item_type == "image":
                    source = item.get("source") or {}
                    source_type = source.get("type")
                    # Only inline base64 images are supported (no URL source).
                    if source_type == "base64":
                        blocks.append(
                            CanonicalContentBlock(
                                type="image",
                                mime_type=str(source.get("media_type") or ""),
                                data=str(source.get("data") or ""),
                            )
                        )
                elif item_type == "tool_result":
                    # Flatten nested tool_result content to newline-joined text.
                    text_parts = AnthropicProtocolAdapter._parse_content(
                        item.get("content")
                    )
                    blocks.append(
                        CanonicalContentBlock(
                            type="tool_result",
                            tool_use_id=str(item.get("tool_use_id") or ""),
                            text="\n".join(
                                part.text or ""
                                for part in text_parts
                                if part.type == "text"
                            ),
                            is_error=bool(item.get("is_error") or False),
                        )
                    )
            return blocks
        raise ValueError("content 格式不合法")

    @staticmethod
    def _message_response(
        req: CanonicalChatRequest,
        message_id: str,
        content: list[dict[str, Any]],
        *,
        stop_reason: str,
    ) -> dict[str, Any]:
        """Build a complete Anthropic message response envelope.

        Token usage is not tracked by this proxy, so usage reports zeros.
        """
        return {
            "id": message_id,
            "type": "message",
            "role": "assistant",
            "model": req.model,
            "content": content,
            "stop_reason": stop_reason,
            "stop_sequence": None,
            "usage": {"input_tokens": 0, "output_tokens": 0},
        }

    @staticmethod
    def _message_id(req: CanonicalChatRequest) -> str:
        """Return a stable per-request message id, memoized in req.metadata."""
        return str(
            req.metadata.setdefault(
                "anthropic_message_id", f"msg_{uuid_mod.uuid4().hex}"
            )
        )
|
| 255 |
+
|
| 256 |
+
|
| 257 |
+
class _AnthropicStreamTranslator:
    """Translates OpenAI chat-completion SSE chunks into Anthropic SSE events.

    Stateful: tracks whether ``message_start`` was emitted, the currently
    open content block (text or tool_use) and its index, so that blocks are
    opened/closed in the order Anthropic clients expect.
    """

    def __init__(self, req: CanonicalChatRequest, message_id: str) -> None:
        self._req = req
        self._message_id = message_id
        # True once message_start has been emitted.
        self._started = False
        # Type of the currently open content block ("text"/"tool_use"), or None.
        self._current_block_type: str | None = None
        # Index of the current block; -1 means no block opened yet.
        self._current_index = -1
        self._pending_tool_id: str | None = None
        self._pending_tool_name: str | None = None
        # True once message_stop has been emitted.
        self._stopped = False

    def feed_openai_sse(
        self,
        sse: str,
        *,
        session_marker: str = "",
    ) -> list[str]:
        """Consume one OpenAI SSE string and return Anthropic SSE strings.

        ``session_marker`` (if non-empty) is appended as text just before the
        message finishes: as its own text block after a tool call, or as a
        trailing text delta otherwise.
        """
        lines = [line for line in sse.splitlines() if line.startswith("data: ")]
        out: list[str] = []
        for line in lines:
            payload = line[6:].strip()
            if payload == "[DONE]":
                # OpenAI terminator has no Anthropic equivalent here.
                continue
            obj = json.loads(payload)
            choice = (obj.get("choices") or [{}])[0]
            delta = choice.get("delta") or {}
            finish_reason = choice.get("finish_reason")
            if not self._started:
                # Emit message_start exactly once, before any block events.
                out.append(
                    self._event(
                        "message_start",
                        {
                            "type": "message_start",
                            "message": {
                                "id": self._message_id,
                                "type": "message",
                                "role": "assistant",
                                "model": self._req.model,
                                "content": [],
                                "stop_reason": None,
                                "stop_sequence": None,
                                "usage": {"input_tokens": 0, "output_tokens": 0},
                            },
                        },
                    )
                )
                self._started = True

            content = delta.get("content")
            if isinstance(content, str) and content:
                out.extend(self._ensure_text_block())
                out.append(
                    self._event(
                        "content_block_delta",
                        {
                            "type": "content_block_delta",
                            "index": self._current_index,
                            "delta": {"type": "text_delta", "text": content},
                        },
                    )
                )

            tool_calls = delta.get("tool_calls") or []
            if tool_calls:
                # Only the first tool call of a delta is translated.
                head = tool_calls[0]
                if head.get("id") and head.get("function", {}).get("name") is not None:
                    # A new tool call: close the open block and start a
                    # tool_use block.
                    out.extend(self._close_current_block())
                    self._current_index += 1
                    self._current_block_type = "tool_use"
                    self._pending_tool_id = str(head.get("id") or "")
                    self._pending_tool_name = str(
                        head.get("function", {}).get("name") or ""
                    )
                    out.append(
                        self._event(
                            "content_block_start",
                            {
                                "type": "content_block_start",
                                "index": self._current_index,
                                "content_block": {
                                    "type": "tool_use",
                                    "id": self._pending_tool_id,
                                    "name": self._pending_tool_name,
                                    "input": {},
                                },
                            },
                        )
                    )
                args_delta = head.get("function", {}).get("arguments")
                if args_delta:
                    # Stream tool arguments as partial JSON deltas.
                    out.append(
                        self._event(
                            "content_block_delta",
                            {
                                "type": "content_block_delta",
                                "index": self._current_index,
                                "delta": {
                                    "type": "input_json_delta",
                                    "partial_json": str(args_delta),
                                },
                            },
                        )
                    )

            if finish_reason:
                if session_marker:
                    if finish_reason == "tool_calls":
                        # After a tool call, the marker gets its own
                        # self-contained text block.
                        out.extend(self._close_current_block())
                        out.extend(self._emit_marker_text_block(session_marker))
                    else:
                        # Otherwise append the marker to the trailing text.
                        out.extend(self._ensure_text_block())
                        out.append(
                            self._event(
                                "content_block_delta",
                                {
                                    "type": "content_block_delta",
                                    "index": self._current_index,
                                    "delta": {
                                        "type": "text_delta",
                                        "text": session_marker,
                                    },
                                },
                            )
                        )
                out.extend(self._close_current_block())
                stop_reason = (
                    "tool_use" if finish_reason == "tool_calls" else "end_turn"
                )
                out.append(
                    self._event(
                        "message_delta",
                        {
                            "type": "message_delta",
                            "delta": {
                                "stop_reason": stop_reason,
                                "stop_sequence": None,
                            },
                            "usage": {"output_tokens": 0},
                        },
                    )
                )
                out.append(self._event("message_stop", {"type": "message_stop"}))
                self._stopped = True
        return out

    def _ensure_text_block(self) -> list[str]:
        """Open a new text block unless one is already the current block."""
        if self._current_block_type == "text":
            return []
        out = self._close_current_block()
        self._current_index += 1
        self._current_block_type = "text"
        out.append(
            self._event(
                "content_block_start",
                {
                    "type": "content_block_start",
                    "index": self._current_index,
                    "content_block": {"type": "text", "text": ""},
                },
            )
        )
        return out

    def _emit_marker_text_block(self, marker: str) -> list[str]:
        """Emit a complete start/delta/stop text block containing ``marker``."""
        self._current_index += 1
        self._current_block_type = "text"
        return [
            self._event(
                "content_block_start",
                {
                    "type": "content_block_start",
                    "index": self._current_index,
                    "content_block": {"type": "text", "text": ""},
                },
            ),
            self._event(
                "content_block_delta",
                {
                    "type": "content_block_delta",
                    "index": self._current_index,
                    "delta": {"type": "text_delta", "text": marker},
                },
            ),
            self._event(
                "content_block_stop",
                {"type": "content_block_stop", "index": self._current_index},
            ),
        ]

    def _close_current_block(self) -> list[str]:
        """Emit content_block_stop for the open block, if any."""
        if self._current_block_type is None:
            return []
        block_index = self._current_index
        self._current_block_type = None
        return [
            self._event(
                "content_block_stop",
                {"type": "content_block_stop", "index": block_index},
            )
        ]

    @staticmethod
    def _event(event_name: str, payload: dict[str, Any]) -> str:
        """Serialize one Anthropic SSE event.

        The event name on the wire is taken from payload["type"]; the
        ``event_name`` argument is kept for call-site readability only.
        """
        del event_name
        return f"event: {payload['type']}\ndata: {json.dumps(payload, ensure_ascii=False)}\n\n"
|
core/protocol/base.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""协议适配器抽象。内部统一以 OpenAI 语义事件流为中间态。"""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
from abc import ABC, abstractmethod
|
| 6 |
+
from collections.abc import AsyncIterator
|
| 7 |
+
from typing import Any
|
| 8 |
+
|
| 9 |
+
from core.hub.schemas import OpenAIStreamEvent
|
| 10 |
+
from core.protocol.schemas import CanonicalChatRequest
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class ProtocolAdapter(ABC):
    """Abstract base class for wire-protocol adapters.

    An adapter converts between one external chat protocol (OpenAI,
    Anthropic, ...) and the internal canonical request / OpenAI-style
    event-stream representation.
    """

    # Short identifier of the protocol, e.g. "openai" or "anthropic".
    protocol_name: str

    @abstractmethod
    def parse_request(self, provider: str, raw_body: dict[str, Any]) -> CanonicalChatRequest:
        """Parse a raw protocol-specific request body into canonical form."""

    @abstractmethod
    def render_non_stream(self, req: CanonicalChatRequest, raw_events: list[OpenAIStreamEvent]) -> dict[str, Any]:
        """Render collected stream events as a single protocol response body."""

    @abstractmethod
    def render_stream(self, req: CanonicalChatRequest, raw_stream: AsyncIterator[OpenAIStreamEvent]) -> AsyncIterator[str]:
        """Render the internal event stream as protocol-specific SSE strings."""

    @abstractmethod
    def render_error(self, exc: Exception) -> tuple[int, dict[str, Any]]:
        """Map an exception to an (HTTP status, error body) pair."""
|
core/protocol/images.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""图片输入解析与下载。"""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import asyncio
|
| 6 |
+
import base64
|
| 7 |
+
import imghdr
|
| 8 |
+
import mimetypes
|
| 9 |
+
import urllib.parse
|
| 10 |
+
import urllib.request
|
| 11 |
+
from dataclasses import dataclass
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
SUPPORTED_IMAGE_MIME_TYPES = {
|
| 15 |
+
"image/png",
|
| 16 |
+
"image/jpeg",
|
| 17 |
+
"image/webp",
|
| 18 |
+
"image/gif",
|
| 19 |
+
}
|
| 20 |
+
MAX_IMAGE_BYTES = 10 * 1024 * 1024
|
| 21 |
+
MAX_IMAGE_COUNT = 5
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
@dataclass
|
| 25 |
+
class PreparedImage:
|
| 26 |
+
filename: str
|
| 27 |
+
mime_type: str
|
| 28 |
+
data: bytes
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def _validate_image_bytes(data: bytes, mime_type: str) -> None:
|
| 32 |
+
if mime_type not in SUPPORTED_IMAGE_MIME_TYPES:
|
| 33 |
+
raise ValueError(f"暂不支持的图片类型: {mime_type}")
|
| 34 |
+
if len(data) > MAX_IMAGE_BYTES:
|
| 35 |
+
raise ValueError("单张图片不能超过 10MB")
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def _default_filename(mime_type: str, *, prefix: str = "image") -> str:
|
| 39 |
+
ext = mimetypes.guess_extension(mime_type) or ".bin"
|
| 40 |
+
if ext == ".jpe":
|
| 41 |
+
ext = ".jpg"
|
| 42 |
+
return f"{prefix}{ext}"
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def parse_data_url(url: str, *, prefix: str = "image") -> PreparedImage:
|
| 46 |
+
if not url.startswith("data:") or ";base64," not in url:
|
| 47 |
+
raise ValueError("仅支持 data:image/...;base64,... 格式")
|
| 48 |
+
header, payload = url.split(",", 1)
|
| 49 |
+
mime_type = header[5:].split(";", 1)[0].strip().lower()
|
| 50 |
+
data = base64.b64decode(payload, validate=True)
|
| 51 |
+
_validate_image_bytes(data, mime_type)
|
| 52 |
+
return PreparedImage(
|
| 53 |
+
filename=_default_filename(mime_type, prefix=prefix),
|
| 54 |
+
mime_type=mime_type,
|
| 55 |
+
data=data,
|
| 56 |
+
)
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def parse_base64_image(
|
| 60 |
+
data_b64: str,
|
| 61 |
+
mime_type: str,
|
| 62 |
+
*,
|
| 63 |
+
prefix: str = "image",
|
| 64 |
+
) -> PreparedImage:
|
| 65 |
+
mime = mime_type.strip().lower()
|
| 66 |
+
data = base64.b64decode(data_b64, validate=True)
|
| 67 |
+
_validate_image_bytes(data, mime)
|
| 68 |
+
return PreparedImage(
|
| 69 |
+
filename=_default_filename(mime, prefix=prefix),
|
| 70 |
+
mime_type=mime,
|
| 71 |
+
data=data,
|
| 72 |
+
)
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def _sniff_mime_type(data: bytes, url: str) -> str:
|
| 76 |
+
kind = imghdr.what(None, data)
|
| 77 |
+
if kind == "jpeg":
|
| 78 |
+
return "image/jpeg"
|
| 79 |
+
if kind in {"png", "gif", "webp"}:
|
| 80 |
+
return f"image/{kind}"
|
| 81 |
+
guessed, _ = mimetypes.guess_type(url)
|
| 82 |
+
return (guessed or "application/octet-stream").lower()
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def _download_remote_image_sync(url: str, *, prefix: str = "image") -> PreparedImage:
|
| 86 |
+
parsed = urllib.parse.urlparse(url)
|
| 87 |
+
if parsed.scheme not in {"http", "https"}:
|
| 88 |
+
raise ValueError("image_url 仅支持 http/https 或 data URL")
|
| 89 |
+
req = urllib.request.Request(
|
| 90 |
+
url,
|
| 91 |
+
headers={"User-Agent": "web2api/1.0", "Accept": "image/*"},
|
| 92 |
+
)
|
| 93 |
+
with urllib.request.urlopen(req, timeout=20) as resp:
|
| 94 |
+
data = resp.read(MAX_IMAGE_BYTES + 1)
|
| 95 |
+
mime_type = str(resp.headers.get_content_type() or "").lower()
|
| 96 |
+
if not mime_type or mime_type == "application/octet-stream":
|
| 97 |
+
mime_type = _sniff_mime_type(data, url)
|
| 98 |
+
_validate_image_bytes(data, mime_type)
|
| 99 |
+
filename = urllib.parse.unquote(
|
| 100 |
+
parsed.path.rsplit("/", 1)[-1]
|
| 101 |
+
) or _default_filename(mime_type, prefix=prefix)
|
| 102 |
+
if "." not in filename:
|
| 103 |
+
filename = _default_filename(mime_type, prefix=prefix)
|
| 104 |
+
return PreparedImage(filename=filename, mime_type=mime_type, data=data)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
async def download_remote_image(url: str, *, prefix: str = "image") -> PreparedImage:
|
| 108 |
+
return await asyncio.to_thread(_download_remote_image_sync, url, prefix=prefix)
|
core/protocol/openai.py
ADDED
|
@@ -0,0 +1,251 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""OpenAI 协议适配器。"""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import json
|
| 6 |
+
import re
|
| 7 |
+
import time
|
| 8 |
+
import uuid as uuid_mod
|
| 9 |
+
from collections.abc import AsyncIterator
|
| 10 |
+
from typing import Any
|
| 11 |
+
|
| 12 |
+
from core.api.conv_parser import (
|
| 13 |
+
extract_session_id_marker,
|
| 14 |
+
parse_conv_uuid_from_messages,
|
| 15 |
+
strip_session_id_suffix,
|
| 16 |
+
)
|
| 17 |
+
from core.api.function_call import build_tool_calls_response
|
| 18 |
+
from core.api.react import (
|
| 19 |
+
format_react_final_answer_content,
|
| 20 |
+
parse_react_output,
|
| 21 |
+
react_output_to_tool_calls,
|
| 22 |
+
)
|
| 23 |
+
from core.api.react_stream_parser import ReactStreamParser
|
| 24 |
+
from core.api.schemas import OpenAIChatRequest, OpenAIContentPart, OpenAIMessage
|
| 25 |
+
from core.hub.schemas import OpenAIStreamEvent
|
| 26 |
+
from core.protocol.base import ProtocolAdapter
|
| 27 |
+
from core.protocol.schemas import (
|
| 28 |
+
CanonicalChatRequest,
|
| 29 |
+
CanonicalContentBlock,
|
| 30 |
+
CanonicalMessage,
|
| 31 |
+
CanonicalToolSpec,
|
| 32 |
+
)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class OpenAIProtocolAdapter(ProtocolAdapter):
    """OpenAI Chat Completions protocol adapter.

    Parses OpenAI-style request bodies into ``CanonicalChatRequest`` and
    renders the internal event stream back as chat.completion responses /
    SSE chunks, including ReAct tool-call translation.
    """

    protocol_name = "openai"

    def parse_request(
        self,
        provider: str,
        raw_body: dict[str, Any],
    ) -> CanonicalChatRequest:
        """Validate and convert a raw OpenAI chat request to canonical form.

        A session id embedded in message text (if any) is extracted into
        ``resume_session_id``; ``system`` messages are hoisted into the
        canonical ``system`` block list.
        """
        req = OpenAIChatRequest.model_validate(raw_body)
        resume_session_id = parse_conv_uuid_from_messages(
            [self._message_to_raw_dict(m) for m in req.messages]
        )
        system_blocks: list[CanonicalContentBlock] = []
        messages: list[CanonicalMessage] = []
        for msg in req.messages:
            blocks = self._to_blocks(msg.content)
            if msg.role == "system":
                system_blocks.extend(blocks)
            else:
                messages.append(CanonicalMessage(role=msg.role, content=blocks))
        tools = [self._to_tool_spec(tool) for tool in list(req.tools or [])]
        return CanonicalChatRequest(
            protocol="openai",
            provider=provider,
            model=req.model,
            system=system_blocks,
            messages=messages,
            stream=req.stream,
            tools=tools,
            tool_choice=req.tool_choice,
            resume_session_id=resume_session_id,
        )

    def render_non_stream(
        self,
        req: CanonicalChatRequest,
        raw_events: list[OpenAIStreamEvent],
    ) -> dict[str, Any]:
        """Render collected stream events as one chat.completion response.

        With tools, the reply is parsed as ReAct output; a detected tool
        call is converted to an OpenAI tool_calls response (the ReAct
        Thought, if present, is surfaced inside a ``<think>`` wrapper).
        """
        reply = "".join(
            ev.content or ""
            for ev in raw_events
            if ev.type == "content_delta" and ev.content
        )
        session_marker = extract_session_id_marker(reply)
        content_for_parse = strip_session_id_suffix(reply)
        chat_id, created = self._response_context(req)
        if req.tools:
            parsed = parse_react_output(content_for_parse)
            tool_calls_list = react_output_to_tool_calls(parsed) if parsed else []
            if tool_calls_list:
                thought_ns = ""
                if "Thought" in content_for_parse:
                    # Grab the Thought segment up to the Action marker
                    # (half- or full-width colon accepted).
                    match = re.search(
                        r"Thought[:：]\s*(.+?)(?=\s*Action[:：]|$)",
                        content_for_parse,
                        re.DOTALL | re.I,
                    )
                    thought_ns = (match.group(1) or "").strip() if match else ""
                text_content = (
                    f"<think>{thought_ns}</think>\n{session_marker}".strip()
                    if thought_ns
                    else session_marker
                )
                return build_tool_calls_response(
                    tool_calls_list,
                    chat_id,
                    req.model,
                    created,
                    text_content=text_content,
                )
            content_reply = format_react_final_answer_content(content_for_parse)
            if session_marker:
                content_reply += session_marker
        else:
            content_reply = content_for_parse
        return {
            "id": chat_id,
            "object": "chat.completion",
            "created": created,
            "model": req.model,
            "choices": [
                {
                    "index": 0,
                    "message": {"role": "assistant", "content": content_reply},
                    "finish_reason": "stop",
                }
            ],
        }

    async def render_stream(
        self,
        req: CanonicalChatRequest,
        raw_stream: AsyncIterator[OpenAIStreamEvent],
    ) -> AsyncIterator[str]:
        """Render the internal event stream as OpenAI SSE chunks.

        A chunk that is purely a session-id marker is withheld and re-emitted
        as a final content delta before the parser finishes.
        """
        chat_id, created = self._response_context(req)
        parser = ReactStreamParser(
            chat_id=chat_id,
            model=req.model,
            created=created,
            has_tools=bool(req.tools),
        )
        session_marker = ""
        async for event in raw_stream:
            if event.type == "content_delta" and event.content:
                chunk = event.content
                if extract_session_id_marker(chunk) and not strip_session_id_suffix(
                    chunk
                ):
                    session_marker = chunk
                    continue
                for sse in parser.feed(chunk):
                    yield sse
            elif event.type == "finish":
                break
        if session_marker:
            yield self._content_delta(chat_id, req.model, created, session_marker)
        for sse in parser.finish():
            yield sse

    def render_error(self, exc: Exception) -> tuple[int, dict[str, Any]]:
        """Map an exception to an OpenAI-shaped error payload.

        ValueError -> 400 invalid_request_error; anything else -> 500 server_error.
        """
        status = 400 if isinstance(exc, ValueError) else 500
        err_type = "invalid_request_error" if status == 400 else "server_error"
        return (
            status,
            {"error": {"message": str(exc), "type": err_type}},
        )

    @staticmethod
    def _message_to_raw_dict(msg: OpenAIMessage) -> dict[str, Any]:
        """Dump one OpenAIMessage back to a plain dict for session parsing."""
        if isinstance(msg.content, list):
            content: str | list[dict[str, Any]] = [p.model_dump() for p in msg.content]
        else:
            content = msg.content
        out: dict[str, Any] = {"role": msg.role, "content": content}
        if msg.tool_calls is not None:
            out["tool_calls"] = msg.tool_calls
        if msg.tool_call_id is not None:
            out["tool_call_id"] = msg.tool_call_id
        return out

    @staticmethod
    def _to_blocks(
        content: str | list[OpenAIContentPart] | None,
    ) -> list[CanonicalContentBlock]:
        """Convert OpenAI message content into canonical content blocks.

        Session-id suffixes are stripped from text; image_url parts become
        image blocks (data URLs kept inline, remote URLs kept as URLs).
        """
        if content is None:
            return []
        if isinstance(content, str):
            return [
                CanonicalContentBlock(
                    type="text", text=strip_session_id_suffix(content)
                )
            ]
        blocks: list[CanonicalContentBlock] = []
        for part in content:
            if part.type == "text":
                blocks.append(
                    CanonicalContentBlock(
                        type="text",
                        text=strip_session_id_suffix(part.text or ""),
                    )
                )
            elif part.type == "image_url":
                # image_url may be either {"url": ...} or a bare string.
                image_url = part.image_url
                url = image_url.get("url") if isinstance(image_url, dict) else image_url
                if not url:
                    continue
                if isinstance(url, str) and url.startswith("data:"):
                    blocks.append(CanonicalContentBlock(type="image", data=url))
                else:
                    blocks.append(CanonicalContentBlock(type="image", url=str(url)))
        return blocks

    @staticmethod
    def _to_tool_spec(tool: dict[str, Any]) -> CanonicalToolSpec:
        """Convert an OpenAI tool definition (wrapped or bare) to canonical form."""
        function = tool.get("function") if tool.get("type") == "function" else tool
        return CanonicalToolSpec(
            name=str(function.get("name") or ""),
            description=str(function.get("description") or ""),
            input_schema=function.get("parameters")
            or function.get("input_schema")
            or {},
            strict=bool(function.get("strict") or False),
        )

    @staticmethod
    def _content_delta(chat_id: str, model: str, created: int, text: str) -> str:
        """Serialize a single chat.completion.chunk SSE carrying ``text``."""
        return (
            "data: "
            + json.dumps(
                {
                    "id": chat_id,
                    "object": "chat.completion.chunk",
                    "created": created,
                    "model": model,
                    "choices": [
                        {
                            "index": 0,
                            "delta": {"content": text},
                            "logprobs": None,
                            "finish_reason": None,
                        }
                    ],
                },
                ensure_ascii=False,
            )
            + "\n\n"
        )

    @staticmethod
    def _response_context(req: CanonicalChatRequest) -> tuple[str, int]:
        """Return (chat_id, created), memoized in req.metadata for reuse."""
        chat_id = str(
            req.metadata.setdefault(
                "response_id", f"chatcmpl-{uuid_mod.uuid4().hex[:24]}"
            )
        )
        created = int(req.metadata.setdefault("created", int(time.time())))
        return chat_id, created
|
core/protocol/schemas.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""协议层内部统一模型。"""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
from typing import Any, Literal
|
| 6 |
+
|
| 7 |
+
from pydantic import BaseModel, Field
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class CanonicalContentBlock(BaseModel):
    """One typed content block inside a canonical message.

    Only the fields relevant to ``type`` are expected to be populated; the
    remaining fields stay None.
    """

    type: Literal["text", "thinking", "tool_use", "tool_result", "image"]
    # "text" / "thinking": the textual payload.
    text: str | None = None
    # "tool_use": call id, tool name and parsed input arguments.
    id: str | None = None
    name: str | None = None
    input: dict[str, Any] | None = None
    # "tool_result": id of the tool_use being answered, plus error flag.
    tool_use_id: str | None = None
    is_error: bool | None = None
    # "image": inline data (a data: URL or raw base64 with mime_type) or a
    # remote URL.
    mime_type: str | None = None
    data: str | None = None
    url: str | None = None
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class CanonicalMessage(BaseModel):
    """A single chat turn: a role plus an ordered list of content blocks."""

    role: Literal["system", "user", "assistant", "tool"]
    content: list[CanonicalContentBlock] = Field(default_factory=list)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class CanonicalToolSpec(BaseModel):
    """Protocol-agnostic tool (function) definition."""

    name: str
    description: str = ""
    # JSON Schema describing the tool's arguments.
    input_schema: dict[str, Any] = Field(default_factory=dict)
    # OpenAI-style "strict" structured-output flag, passed through unchanged.
    strict: bool = False
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class CanonicalChatRequest(BaseModel):
    """Normalized chat request shared by the OpenAI and Anthropic frontends."""

    # Which wire protocol the caller used.
    protocol: Literal["openai", "anthropic"]
    # Upstream provider/plugin name.
    provider: str
    model: str
    system: list[CanonicalContentBlock] = Field(default_factory=list)
    messages: list[CanonicalMessage] = Field(default_factory=list)
    stream: bool = False
    max_tokens: int | None = None
    temperature: float | None = None
    top_p: float | None = None
    stop_sequences: list[str] = Field(default_factory=list)
    tools: list[CanonicalToolSpec] = Field(default_factory=list)
    tool_choice: str | dict[str, Any] | None = None
    # When set, continue an existing upstream session instead of rebuilding
    # the conversation from scratch.
    resume_session_id: str | None = None
    # Free-form side channel (e.g. cached response_id / created timestamp).
    metadata: dict[str, Any] = Field(default_factory=dict)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class CanonicalStreamEvent(BaseModel):
    """Normalized streaming event emitted while relaying an upstream reply.

    Field usage varies by ``type``; fields irrelevant to an event stay None.
    """

    type: Literal[
        "message_start",
        "text_delta",
        "thinking_delta",
        "tool_call",
        "usage",
        "message_stop",
        "error",
    ]
    # text_delta / thinking_delta payload.
    text: str | None = None
    # tool_call: call id, tool name, and (presumably JSON-encoded) arguments.
    id: str | None = None
    name: str | None = None
    arguments: str | None = None
    # message_stop: why the upstream finished.
    stop_reason: str | None = None
    # message_start: upstream session identifier, if any.
    session_id: str | None = None
    # usage: token counters keyed by counter name.
    usage: dict[str, int] | None = None
    # error: human-readable description.
    error: str | None = None
|
core/protocol/service.py
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Canonical 请求桥接到 OpenAI 语义事件流(唯一中间态)。"""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
from collections.abc import AsyncIterator
|
| 6 |
+
|
| 7 |
+
from core.api.chat_handler import ChatHandler
|
| 8 |
+
from core.api.schemas import (
|
| 9 |
+
InputAttachment,
|
| 10 |
+
OpenAIChatRequest,
|
| 11 |
+
OpenAIContentPart,
|
| 12 |
+
OpenAIMessage,
|
| 13 |
+
)
|
| 14 |
+
from core.protocol.images import (
|
| 15 |
+
MAX_IMAGE_COUNT,
|
| 16 |
+
download_remote_image,
|
| 17 |
+
parse_base64_image,
|
| 18 |
+
parse_data_url,
|
| 19 |
+
)
|
| 20 |
+
from core.hub.schemas import OpenAIStreamEvent
|
| 21 |
+
from core.protocol.schemas import CanonicalChatRequest, CanonicalContentBlock, CanonicalMessage
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class CanonicalChatService:
    """Bridge a CanonicalChatRequest onto the OpenAI-semantics event stream
    produced by ChatHandler (the project's single intermediate format)."""

    def __init__(self, handler: ChatHandler) -> None:
        # ChatHandler performs the actual upstream call.
        self._handler = handler

    async def stream_raw(
        self, req: CanonicalChatRequest
    ) -> AsyncIterator[OpenAIStreamEvent]:
        """Convert ``req`` to OpenAI shape and relay upstream events verbatim."""
        openai_req = await self._to_openai_request(req)
        async for event in self._handler.stream_openai_events(req.provider, openai_req):
            yield event

    async def collect_raw(self, req: CanonicalChatRequest) -> list[OpenAIStreamEvent]:
        """Non-streaming variant: drain stream_raw() into a list."""
        events: list[OpenAIStreamEvent] = []
        async for event in self.stream_raw(req):
            events.append(event)
        return events

    async def _to_openai_request(self, req: CanonicalChatRequest) -> OpenAIChatRequest:
        """Flatten the canonical request into an OpenAIChatRequest.

        System blocks become a leading system message; tools are wrapped in
        the OpenAI "function" envelope; image attachments are resolved
        (downloaded/decoded) up front.
        """
        messages: list[OpenAIMessage] = []
        if req.system:
            messages.append(
                OpenAIMessage(
                    role="system",
                    content=self._to_openai_content(req.system),
                )
            )
        for msg in req.messages:
            messages.append(
                OpenAIMessage(
                    role=msg.role,
                    content=self._to_openai_content(msg.content),
                    # Tool replies carry the id of the tool_use they answer,
                    # taken from the first content block.
                    tool_call_id=msg.content[0].tool_use_id
                    if msg.role == "tool" and msg.content
                    else None,
                )
            )

        openai_tools = [
            {
                "type": "function",
                "function": {
                    "name": tool.name,
                    "description": tool.description,
                    "parameters": tool.input_schema,
                    "strict": tool.strict,
                },
            }
            for tool in req.tools
        ]
        last_user_attachments, all_attachments = await self._resolve_attachments(req)
        return OpenAIChatRequest(
            model=req.model,
            messages=messages,
            stream=req.stream,
            tools=openai_tools or None,
            tool_choice=req.tool_choice,
            resume_session_id=req.resume_session_id,
            upstream_model=str(req.metadata.get("upstream_model") or "") or None,
            # ChatHandler decides (based on whether it replays full history)
            # which of the two lists below is actually assigned to
            # attachment_files.
            attachment_files=[],
            attachment_files_last_user=last_user_attachments,
            attachment_files_all_users=all_attachments,
        )

    async def _resolve_attachments(
        self, req: CanonicalChatRequest
    ) -> tuple[list[InputAttachment], list[InputAttachment]]:
        """
        Resolve image attachments, returning
        (last_user_attachments, all_user_attachments):

        - when reusing a session (full_history=False), only the images of the
          last user message are needed;
        - when rebuilding a session (full_history=True), the images of every
          historical user message must be supplied as well.

        Raises ValueError when the total image count exceeds MAX_IMAGE_COUNT
        or an image block carries no usable data.
        """
        last_user: CanonicalMessage | None = None
        for msg in reversed(req.messages):
            if msg.role == "user":
                last_user = msg
                break

        # Images from every user message (used when rebuilding history).
        all_image_blocks: list[CanonicalContentBlock] = []
        for msg in req.messages:
            if msg.role != "user":
                continue
            all_image_blocks.extend(
                block for block in msg.content if block.type == "image"
            )

        last_user_blocks: list[CanonicalContentBlock] = []
        if last_user is not None:
            last_user_blocks = [
                block for block in last_user.content if block.type == "image"
            ]

        if len(all_image_blocks) > MAX_IMAGE_COUNT:
            raise ValueError(f"单次最多上传 {MAX_IMAGE_COUNT} 张图片")

        async def _prepare(
            blocks: list[CanonicalContentBlock],
        ) -> list[InputAttachment]:
            # Turn each image block into an InputAttachment, preferring a
            # remote URL, then a data: URL, then raw base64 + mime type.
            attachments: list[InputAttachment] = []
            for idx, block in enumerate(blocks, start=1):
                if block.url:
                    prepared = await download_remote_image(
                        block.url, prefix=f"message_image_{idx}"
                    )
                elif block.data and block.data.startswith("data:"):
                    prepared = parse_data_url(block.data, prefix=f"message_image_{idx}")
                elif block.data and block.mime_type:
                    prepared = parse_base64_image(
                        block.data,
                        block.mime_type,
                        prefix=f"message_image_{idx}",
                    )
                else:
                    raise ValueError("图片块缺少可用数据")
                attachments.append(
                    InputAttachment(
                        filename=prepared.filename,
                        mime_type=prepared.mime_type,
                        data=prepared.data,
                    )
                )
            return attachments

        last_attachments = await _prepare(last_user_blocks)
        all_attachments = await _prepare(all_image_blocks)
        return last_attachments, all_attachments

    @staticmethod
    def _to_openai_content(
        blocks: list[CanonicalContentBlock],
    ) -> str | list[OpenAIContentPart]:
        """Convert canonical blocks into OpenAI message content.

        Text-like blocks (text/thinking/tool_result) become text parts and
        image blocks become image_url parts; other block types (e.g.
        tool_use) are not represented here. A lone text part collapses to a
        bare string.
        """
        if not blocks:
            return ""
        parts: list[OpenAIContentPart] = []
        for block in blocks:
            if block.type in {"text", "thinking", "tool_result"}:
                parts.append(OpenAIContentPart(type="text", text=block.text or ""))
            elif block.type == "image":
                url = block.url or block.data or ""
                parts.append(
                    OpenAIContentPart(
                        type="image_url",
                        image_url={"url": url},
                    )
                )
        if not parts:
            return ""
        if len(parts) == 1 and parts[0].type == "text":
            return parts[0].text or ""
        return parts
|
core/runtime/__init__.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""运行时:浏览器进程、CDP 连接、page/会话缓存。"""
|
| 2 |
+
|
| 3 |
+
from core.runtime.keys import ProxyKey
|
| 4 |
+
from core.runtime.session_cache import SessionCache, SessionEntry
|
| 5 |
+
from core.runtime.browser_manager import BrowserManager
|
| 6 |
+
|
| 7 |
+
__all__ = [
|
| 8 |
+
"ProxyKey",
|
| 9 |
+
"SessionCache",
|
| 10 |
+
"SessionEntry",
|
| 11 |
+
"BrowserManager",
|
| 12 |
+
]
|
core/runtime/browser_manager.py
ADDED
|
@@ -0,0 +1,839 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
浏览器管理器:按 ProxyKey 管理浏览器进程;每个浏览器内每个 type 仅保留一个 tab。
|
| 3 |
+
|
| 4 |
+
当前实现的职责:
|
| 5 |
+
|
| 6 |
+
- 一个 ProxyKey 对应一个 Chromium 进程
|
| 7 |
+
- 一个浏览器内,一个 type 只允许一个 page/tab
|
| 8 |
+
- tab 绑定一个 account,只有 drained 后才能切号
|
| 9 |
+
- tab 可承载多个 session,并记录活跃请求数与最近使用时间
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
from __future__ import annotations
|
| 13 |
+
|
| 14 |
+
import asyncio
|
| 15 |
+
import logging
|
| 16 |
+
import os
|
| 17 |
+
import subprocess
|
| 18 |
+
import tempfile
|
| 19 |
+
import time
|
| 20 |
+
from dataclasses import dataclass, field
|
| 21 |
+
from pathlib import Path
|
| 22 |
+
from typing import TYPE_CHECKING, Any, Callable, Coroutine
|
| 23 |
+
|
| 24 |
+
if TYPE_CHECKING:
|
| 25 |
+
from core.runtime.local_proxy_forwarder import LocalProxyForwarder
|
| 26 |
+
|
| 27 |
+
from playwright.async_api import Browser, BrowserContext, Page, async_playwright
|
| 28 |
+
|
| 29 |
+
from core.constants import CDP_PORT_RANGE, CHROMIUM_BIN, TIMEZONE, user_data_dir
|
| 30 |
+
from core.plugin.errors import BrowserResourceInvalidError
|
| 31 |
+
from core.runtime.keys import ProxyKey
|
| 32 |
+
|
| 33 |
+
logger = logging.getLogger(__name__)
|
| 34 |
+
|
| 35 |
+
CreatePageFn = Callable[[BrowserContext, Page | None], Coroutine[Any, Any, Page]]
|
| 36 |
+
ApplyAuthFn = Callable[[BrowserContext, Page], Coroutine[Any, Any, None]]
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
async def _wait_for_cdp(
|
| 40 |
+
host: str,
|
| 41 |
+
port: int,
|
| 42 |
+
max_attempts: int = 60,
|
| 43 |
+
interval: float = 2.0,
|
| 44 |
+
connect_timeout: float = 2.0,
|
| 45 |
+
) -> bool:
|
| 46 |
+
for _ in range(max_attempts):
|
| 47 |
+
try:
|
| 48 |
+
_, writer = await asyncio.wait_for(
|
| 49 |
+
asyncio.open_connection(host, port), timeout=connect_timeout
|
| 50 |
+
)
|
| 51 |
+
writer.close()
|
| 52 |
+
await writer.wait_closed()
|
| 53 |
+
return True
|
| 54 |
+
except (OSError, asyncio.TimeoutError):
|
| 55 |
+
await asyncio.sleep(interval)
|
| 56 |
+
return False
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def _is_cdp_listening(port: int) -> bool:
|
| 60 |
+
import socket
|
| 61 |
+
|
| 62 |
+
try:
|
| 63 |
+
with socket.create_connection(("127.0.0.1", port), timeout=1.0):
|
| 64 |
+
pass
|
| 65 |
+
return True
|
| 66 |
+
except OSError:
|
| 67 |
+
return False
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
@dataclass
class TabRuntime:
    """A single type-specific tab inside a managed browser.

    Per the module contract, a tab is bound to one account and may host
    multiple sessions; it tracks in-flight load and recency.
    """

    type_name: str
    page: Page
    # Account this tab is currently bound to.
    account_id: str
    # Number of requests currently in flight on this tab.
    active_requests: int = 0
    # Whether new requests may still be routed to this tab.
    accepting_new: bool = True
    state: str = "ready"
    last_used_at: float = field(default_factory=time.time)
    # Epoch seconds until which the tab is frozen, if set — TODO confirm units.
    frozen_until: int | None = None
    # Session ids currently hosted by this tab.
    sessions: set[str] = field(default_factory=set)
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
@dataclass
class BrowserEntry:
    """Runtime state of the single browser owned by one ProxyKey."""

    # Chromium OS process handle.
    proc: subprocess.Popen[Any]
    # CDP remote-debugging port the process listens on.
    port: int
    # Playwright handle attached over CDP.
    browser: Browser
    # The browser's default context.
    context: BrowserContext
    # Where Chromium's stderr is captured, if anywhere.
    stderr_path: Path | None = None
    # type name -> tab runtime.
    tabs: dict[str, TabRuntime] = field(default_factory=dict)
    last_used_at: float = field(default_factory=time.time)
    proxy_forwarder: Any = None  # LocalProxyForwarder | None; non-None only when use_proxy
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
@dataclass
class ClosedTabInfo:
    """Session-cleanup info returned when a tab/browser is closed."""

    proxy_key: ProxyKey
    type_name: str
    account_id: str
    # Sessions that were hosted on the closed tab and must be evicted.
    session_ids: list[str]
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
class BrowserManager:
|
| 110 |
+
"""按代理组管理浏览器及其 type -> tab 映射。"""
|
| 111 |
+
|
| 112 |
+
    def __init__(
        self,
        chromium_bin: str = CHROMIUM_BIN,
        headless: bool = False,
        no_sandbox: bool = False,
        disable_gpu: bool = False,
        disable_gpu_sandbox: bool = False,
        port_range: list[int] | None = None,
        cdp_wait_max_attempts: int = 90,
        cdp_wait_interval_seconds: float = 2.0,
        cdp_wait_connect_timeout_seconds: float = 2.0,
    ) -> None:
        """Configure launch flags and the pool of usable CDP ports.

        No browser is started here; processes are spawned lazily by
        ensure_browser().
        """
        self._chromium_bin = chromium_bin
        self._headless = headless
        self._no_sandbox = no_sandbox
        self._disable_gpu = disable_gpu
        self._disable_gpu_sandbox = disable_gpu_sandbox
        self._port_range = port_range or list(CDP_PORT_RANGE)
        self._entries: dict[ProxyKey, BrowserEntry] = {}
        # Ports not currently claimed by a running browser.
        self._available_ports: set[int] = set(self._port_range)
        self._playwright: Any = None
        # Clamp the CDP-wait knobs to sane minimums.
        self._cdp_wait_max_attempts = max(1, int(cdp_wait_max_attempts))
        self._cdp_wait_interval_seconds = max(0.05, float(cdp_wait_interval_seconds))
        self._cdp_wait_connect_timeout_seconds = max(
            0.2, float(cdp_wait_connect_timeout_seconds)
        )
|
| 138 |
+
|
| 139 |
+
def _stderr_log_path(self, proxy_key: ProxyKey, port: int) -> Path:
|
| 140 |
+
log_dir = Path(tempfile.gettempdir()) / "web2api-browser-logs"
|
| 141 |
+
log_dir.mkdir(parents=True, exist_ok=True)
|
| 142 |
+
return log_dir / (
|
| 143 |
+
f"{proxy_key.fingerprint_id}-{port}-{int(time.time())}.stderr.log"
|
| 144 |
+
)
|
| 145 |
+
|
| 146 |
+
@staticmethod
|
| 147 |
+
def _read_stderr_tail(stderr_path: Path | None, max_chars: int = 4000) -> str:
|
| 148 |
+
if stderr_path is None or not stderr_path.exists():
|
| 149 |
+
return ""
|
| 150 |
+
try:
|
| 151 |
+
content = stderr_path.read_text(encoding="utf-8", errors="replace")
|
| 152 |
+
except Exception:
|
| 153 |
+
return ""
|
| 154 |
+
content = content.strip()
|
| 155 |
+
if not content:
|
| 156 |
+
return ""
|
| 157 |
+
return content[-max_chars:]
|
| 158 |
+
|
| 159 |
+
@staticmethod
|
| 160 |
+
def _cleanup_stderr_log(stderr_path: Path | None) -> None:
|
| 161 |
+
if stderr_path is None:
|
| 162 |
+
return
|
| 163 |
+
try:
|
| 164 |
+
stderr_path.unlink(missing_ok=True)
|
| 165 |
+
except Exception:
|
| 166 |
+
pass
|
| 167 |
+
|
| 168 |
+
    def current_proxy_keys(self) -> list[ProxyKey]:
        """All ProxyKeys that currently have a tracked browser entry."""
        return list(self._entries.keys())

    def browser_count(self) -> int:
        """Number of tracked browser entries."""
        return len(self._entries)

    def list_browser_entries(self) -> list[tuple[ProxyKey, BrowserEntry]]:
        """Snapshot of (proxy_key, entry) pairs."""
        return list(self._entries.items())

    def get_browser_entry(self, proxy_key: ProxyKey) -> BrowserEntry | None:
        """Entry for ``proxy_key``, or None if no browser is tracked for it."""
        return self._entries.get(proxy_key)

    def get_tab(self, proxy_key: ProxyKey, type_name: str) -> TabRuntime | None:
        """Tab of the given type inside ``proxy_key``'s browser, or None."""
        entry = self._entries.get(proxy_key)
        if entry is None:
            return None
        return entry.tabs.get(type_name)

    def browser_load(self, proxy_key: ProxyKey) -> int:
        """Total in-flight requests across the browser's tabs (0 if absent)."""
        entry = self._entries.get(proxy_key)
        if entry is None:
            return 0
        return sum(tab.active_requests for tab in entry.tabs.values())
|
| 191 |
+
|
| 192 |
+
    def browser_diagnostics(self, proxy_key: ProxyKey) -> dict[str, Any]:
        """Collect liveness and load diagnostics for ``proxy_key``'s browser.

        Always returns the same key set; when no entry exists, every field
        carries its "absent" value (False/0/""/[]).
        """
        entry = self._entries.get(proxy_key)
        if entry is None:
            return {
                "browser_present": False,
                "proc_alive": False,
                "cdp_listening": False,
                "stderr_tail": "",
                "tab_count": 0,
                "active_requests": 0,
                "tabs": [],
            }
        # Per-tab summaries for the report.
        tabs = [
            {
                "type": type_name,
                "state": tab.state,
                "accepting_new": tab.accepting_new,
                "active_requests": tab.active_requests,
                "session_count": len(tab.sessions),
            }
            for type_name, tab in entry.tabs.items()
        ]
        return {
            "browser_present": True,
            # poll() is None while the OS process is still running.
            "proc_alive": entry.proc.poll() is None,
            "cdp_listening": _is_cdp_listening(entry.port),
            "stderr_tail": self._read_stderr_tail(entry.stderr_path),
            "tab_count": len(entry.tabs),
            "active_requests": sum(tab.active_requests for tab in entry.tabs.values()),
            "tabs": tabs,
        }
|
| 223 |
+
|
| 224 |
+
    def _raise_browser_resource_invalid(
        self,
        proxy_key: ProxyKey,
        *,
        detail: str,
        helper_name: str,
        stage: str,
        resource_hint: str = "browser",
        request_url: str = "",
        page_url: str = "",
        request_id: str | None = None,
        stream_phase: str | None = None,
        type_name: str | None = None,
        account_id: str | None = None,
    ) -> None:
        """Log browser diagnostics, then raise BrowserResourceInvalidError.

        The keyword-only context fields are forwarded verbatim into the
        raised error so callers can report where the resource went bad.
        """
        diagnostics = self.browser_diagnostics(proxy_key)
        logger.warning(
            "[browser-resource-invalid] helper=%s stage=%s proxy=%s resource=%s request_id=%s type=%s account=%s proc_alive=%s cdp_listening=%s tab_count=%s active_requests=%s stderr_tail=%s detail=%s",
            helper_name,
            stage,
            proxy_key.fingerprint_id,
            resource_hint,
            request_id,
            type_name,
            account_id,
            diagnostics.get("proc_alive"),
            diagnostics.get("cdp_listening"),
            diagnostics.get("tab_count"),
            diagnostics.get("active_requests"),
            diagnostics.get("stderr_tail"),
            detail,
        )
        raise BrowserResourceInvalidError(
            detail,
            helper_name=helper_name,
            operation="browser_manager",
            stage=stage,
            resource_hint=resource_hint,
            request_url=request_url,
            page_url=page_url,
            request_id=request_id,
            stream_phase=stream_phase,
            proxy_key=proxy_key,
            type_name=type_name,
            account_id=account_id,
        )
|
| 270 |
+
|
| 271 |
+
def touch_browser(self, proxy_key: ProxyKey) -> None:
|
| 272 |
+
entry = self._entries.get(proxy_key)
|
| 273 |
+
if entry is not None:
|
| 274 |
+
entry.last_used_at = time.time()
|
| 275 |
+
|
| 276 |
+
    def _launch_process(
        self,
        proxy_key: ProxyKey,
        proxy_pass: str,
        port: int,
    ) -> tuple[subprocess.Popen[Any], Path, LocalProxyForwarder | None]:
        """Launch a Chromium process on the given port.

        When the proxy key requires a proxy, authentication is handled by a
        local forwarding proxy (no browser extension). Returns the process,
        the stderr capture path, and the forwarder (None when direct).
        Raises RuntimeError when the Chromium binary is missing.
        """
        udd = user_data_dir(proxy_key.fingerprint_id)
        udd.mkdir(parents=True, exist_ok=True)

        if not Path(self._chromium_bin).exists():
            raise RuntimeError(f"Chromium 不存在: {self._chromium_bin}")

        args = [
            self._chromium_bin,
            f"--remote-debugging-port={port}",
            f"--fingerprint={proxy_key.fingerprint_id}",
            "--fingerprint-platform=windows",
            "--fingerprint-brand=Edge",
            f"--user-data-dir={udd}",
            f"--timezone={proxy_key.timezone or TIMEZONE}",
            "--force-webrtc-ip-handling-policy",
            "--webrtc-ip-handling-policy=disable_non_proxied_udp",
            "--disable-features=AsyncDNS",
            "--disable-dev-shm-usage",
            "--no-first-run",
            "--no-default-browser-check",
            # Memory optimization for constrained environments (HF Spaces cpu-basic)
            "--renderer-process-limit=1",
            "--disable-extensions",
            "--disable-background-networking",
            "--disable-component-update",
            "--disable-sync",
            "--disable-translate",
            "--disable-features=MediaRouter,TranslateUI",
            "--js-flags=--max-old-space-size=256",
        ]
        proxy_forwarder = None
        if proxy_key.use_proxy:
            # Imported lazily so direct (no-proxy) deployments skip the module.
            from core.runtime.local_proxy_forwarder import (
                LocalProxyForwarder,
                UpstreamProxy,
                parse_proxy_server,
            )

            upstream_host, upstream_port = parse_proxy_server(proxy_key.proxy_host)
            upstream = UpstreamProxy(
                host=upstream_host,
                port=upstream_port,
                username=proxy_key.proxy_user,
                password=proxy_pass,
            )
            # Listen on an ephemeral localhost port; credentials live in the
            # forwarder so Chromium sees an unauthenticated local proxy.
            proxy_forwarder = LocalProxyForwarder(
                upstream,
                listen_host="127.0.0.1",
                listen_port=0,
                on_log=lambda msg: logger.debug("[proxy] %s", msg),
            )
            proxy_forwarder.start()
            args.append(f"--proxy-server={proxy_forwarder.proxy_url}")
        if self._headless:
            args.extend(
                [
                    "--headless=new",
                    "--window-size=1920,1080",
                ]
            )
        if self._headless or self._disable_gpu:
            args.append("--disable-gpu")
        if self._disable_gpu_sandbox:
            args.append("--disable-gpu-sandbox")
        if self._no_sandbox:
            args.extend(
                [
                    "--no-sandbox",
                    "--disable-setuid-sandbox",
                ]
            )
        env = os.environ.copy()
        env["NODE_OPTIONS"] = (
            env.get("NODE_OPTIONS") or ""
        ).strip() + " --no-deprecation"
        env.setdefault("DBUS_SESSION_BUS_ADDRESS", "/dev/null")
        # Capture stderr to a file so launch failures can be diagnosed later.
        stderr_path = self._stderr_log_path(proxy_key, port)
        stderr_fp = stderr_path.open("ab")
        try:
            proc = subprocess.Popen(
                args,
                stdin=subprocess.DEVNULL,
                stdout=subprocess.DEVNULL,
                stderr=stderr_fp,
                env=env,
            )
        finally:
            # The child holds its own copy of the descriptor; close ours.
            stderr_fp.close()
        return proc, stderr_path, proxy_forwarder
|
| 372 |
+
|
| 373 |
+
    async def ensure_browser(
        self,
        proxy_key: ProxyKey,
        proxy_pass: str,
    ) -> BrowserContext:
        """
        Ensure a browser exists for ``proxy_key``; reuse it when alive.

        A stale entry (process exited or CDP no longer listening) is torn
        down and replaced. On every launch-failure path the claimed port is
        returned to the pool, the forwarder/process are stopped, and the
        captured stderr is surfaced in the log before raising.
        """
        entry = self._entries.get(proxy_key)
        if entry is not None:
            # Reuse only if the process is alive AND CDP still answers.
            if entry.proc.poll() is not None or not _is_cdp_listening(entry.port):
                await self._close_entry_async(proxy_key)
            else:
                entry.last_used_at = time.time()
                return entry.context

        if not self._available_ports:
            raise RuntimeError(
                "无可用 CDP 端口,当前并发浏览器数已达上限,请稍后重试或增大 cdp_port_count"
            )
        port = self._available_ports.pop()
        proc, stderr_path, proxy_forwarder = self._launch_process(
            proxy_key, proxy_pass, port
        )
        logger.info(
            "已启动 Chromium PID=%s port=%s mode=%s headless=%s no_sandbox=%s disable_gpu=%s disable_gpu_sandbox=%s,等待 CDP 就绪...",
            proc.pid,
            port,
            "proxy" if proxy_key.use_proxy else "direct",
            self._headless,
            self._no_sandbox,
            self._disable_gpu,
            self._disable_gpu_sandbox,
        )
        ok = await _wait_for_cdp(
            "127.0.0.1",
            port,
            max_attempts=self._cdp_wait_max_attempts,
            interval=self._cdp_wait_interval_seconds,
            connect_timeout=self._cdp_wait_connect_timeout_seconds,
        )
        if not ok:
            # Roll back: release port, stop forwarder, kill process, log stderr.
            self._available_ports.add(port)
            if proxy_forwarder is not None:
                try:
                    proxy_forwarder.stop()
                except Exception:
                    pass
            try:
                proc.terminate()
                proc.wait(timeout=5)
            except Exception:
                pass
            stderr_tail = self._read_stderr_tail(stderr_path)
            self._cleanup_stderr_log(stderr_path)
            if stderr_tail:
                logger.error(
                    "Chromium 启动失败,CDP 未就绪。stderr tail:\n%s",
                    stderr_tail,
                )
            raise RuntimeError("CDP 未在预期时间内就绪")

        # Lazily start Playwright on first use; shared across browsers.
        if self._playwright is None:
            self._playwright = await async_playwright().start()
        endpoint = f"http://127.0.0.1:{port}"
        try:
            browser = await self._playwright.chromium.connect_over_cdp(
                endpoint, timeout=10000
            )
        except Exception:
            # Same rollback as above when Playwright cannot attach over CDP.
            self._available_ports.add(port)
            if proxy_forwarder is not None:
                try:
                    proxy_forwarder.stop()
                except Exception:
                    pass
            try:
                proc.terminate()
                proc.wait(timeout=5)
            except Exception:
                pass
            stderr_tail = self._read_stderr_tail(stderr_path)
            self._cleanup_stderr_log(stderr_path)
            if stderr_tail:
                logger.error(
                    "Chromium 已监听 CDP 但 connect_over_cdp 失败。stderr tail:\n%s",
                    stderr_tail,
                )
            raise
        context = browser.contexts[0] if browser.contexts else None
        if context is None:
            # An attached browser with no default context is unusable.
            await browser.close()
            self._available_ports.add(port)
            if proxy_forwarder is not None:
                try:
                    proxy_forwarder.stop()
                except Exception:
                    pass
            try:
                proc.terminate()
                proc.wait(timeout=5)
            except Exception:
                pass
            self._cleanup_stderr_log(stderr_path)
            raise RuntimeError("浏览器无默认 context")
        self._entries[proxy_key] = BrowserEntry(
            proc=proc,
            port=port,
            browser=browser,
            context=context,
            stderr_path=stderr_path,
            proxy_forwarder=proxy_forwarder,
        )
        return context
|
| 487 |
+
|
| 488 |
+
    async def open_tab(
        self,
        proxy_key: ProxyKey,
        proxy_pass: str,
        type_name: str,
        account_id: str,
        create_page_fn: CreatePageFn,
        apply_auth_fn: ApplyAuthFn,
    ) -> TabRuntime:
        """Create a tab of *type_name* in the browser for *proxy_key* and bind it to *account_id*.

        Returns the existing TabRuntime unchanged when a tab of this type is
        already open. Page creation is delegated to *create_page_fn* and
        account authentication to *apply_auth_fn*.
        """
        context = await self.ensure_browser(proxy_key, proxy_pass)
        entry = self._entries.get(proxy_key)
        if entry is None:
            raise RuntimeError("ensure_browser 未创建 entry")
        existing = entry.tabs.get(type_name)
        if existing is not None:
            return existing

        logger.info(
            "[tab] opening proxy=%s type=%s account=%s reuse_blank=%s tab_count=%s active_requests=%s",
            proxy_key.fingerprint_id,
            type_name,
            account_id,
            bool(len(entry.tabs) == 0 and context.pages),
            len(entry.tabs),
            sum(tab.active_requests for tab in entry.tabs.values()),
        )
        # For the very first tab, reuse Chromium's default blank page so we do
        # not leave one useless extra tab open.
        reuse_page = (
            context.pages[0] if (len(entry.tabs) == 0 and context.pages) else None
        )
        try:
            page = await create_page_fn(context, reuse_page)
        except Exception as e:
            msg = str(e)
            normalized = msg.lower()
            # These CDP errors indicate the browser can no longer open tabs;
            # report it as an invalid browser resource instead of a generic error.
            if "target.createtarget" in normalized or "failed to open a new tab" in normalized:
                self._raise_browser_resource_invalid(
                    proxy_key,
                    detail=msg,
                    helper_name="open_tab",
                    stage="create_page",
                    resource_hint="browser",
                    type_name=type_name,
                    account_id=account_id,
                )
            raise
        try:
            await apply_auth_fn(context, page)
        except Exception as e:
            # Authentication failed: close the half-initialized page before propagating.
            try:
                await page.close()
            except Exception:
                pass
            msg = str(e)
            normalized = msg.lower()
            if (
                "target crashed" in normalized
                or "page has been closed" in normalized
                or "browser has been closed" in normalized
                or "has been disconnected" in normalized
            ):
                self._raise_browser_resource_invalid(
                    proxy_key,
                    detail=msg,
                    helper_name="open_tab",
                    stage="apply_auth",
                    resource_hint="browser",
                    page_url=getattr(page, "url", "") or "",
                    type_name=type_name,
                    account_id=account_id,
                )
            raise

        tab = TabRuntime(
            type_name=type_name,
            page=page,
            account_id=account_id,
        )
        entry.tabs[type_name] = tab
        entry.last_used_at = time.time()
        logger.info(
            "[tab] opened mode=%s proxy=%s type=%s account=%s",
            "proxy" if proxy_key.use_proxy else "direct",
            proxy_key.fingerprint_id,
            type_name,
            account_id,
        )
        return tab
|
| 577 |
+
|
| 578 |
+
    async def switch_tab_account(
        self,
        proxy_key: ProxyKey,
        type_name: str,
        account_id: str,
        apply_auth_fn: ApplyAuthFn,
    ) -> bool:
        """
        Switch the account on the existing page in place.

        Only permitted once the tab has fully drained (active_requests == 0).
        Returns True on success; False when the tab is missing, still busy,
        or re-authentication fails (the tab is then left in "draining").
        """
        entry = self._entries.get(proxy_key)
        if entry is None:
            return False
        tab = entry.tabs.get(type_name)
        if tab is None or tab.active_requests != 0:
            return False

        # Block new requests while re-authenticating on the same page.
        tab.accepting_new = False
        tab.state = "switching"
        try:
            await apply_auth_fn(entry.context, tab.page)
        except Exception:
            # Auth failed: keep the tab closed to new work so it can be recycled.
            tab.state = "draining"
            return False

        tab.account_id = account_id
        tab.accepting_new = True
        tab.state = "ready"
        tab.frozen_until = None
        tab.last_used_at = time.time()
        # Sessions belonged to the previous account and are no longer valid.
        tab.sessions.clear()
        entry.last_used_at = time.time()
        logger.info(
            "[tab] switched account mode=%s proxy=%s type=%s account=%s",
            "proxy" if proxy_key.use_proxy else "direct",
            proxy_key.fingerprint_id,
            type_name,
            account_id,
        )
        return True
|
| 618 |
+
|
| 619 |
+
def acquire_tab(
|
| 620 |
+
self,
|
| 621 |
+
proxy_key: ProxyKey,
|
| 622 |
+
type_name: str,
|
| 623 |
+
max_concurrent: int,
|
| 624 |
+
) -> Page | None:
|
| 625 |
+
"""
|
| 626 |
+
为一次请求占用 tab;tab 必须存在、可接新请求且未达到并发上限。
|
| 627 |
+
"""
|
| 628 |
+
entry = self._entries.get(proxy_key)
|
| 629 |
+
if entry is None:
|
| 630 |
+
return None
|
| 631 |
+
tab = entry.tabs.get(type_name)
|
| 632 |
+
if tab is None:
|
| 633 |
+
return None
|
| 634 |
+
if not tab.accepting_new or tab.active_requests >= max_concurrent:
|
| 635 |
+
return None
|
| 636 |
+
tab.active_requests += 1
|
| 637 |
+
tab.last_used_at = time.time()
|
| 638 |
+
entry.last_used_at = tab.last_used_at
|
| 639 |
+
tab.state = "busy"
|
| 640 |
+
return tab.page
|
| 641 |
+
|
| 642 |
+
def release_tab(self, proxy_key: ProxyKey, type_name: str) -> None:
|
| 643 |
+
"""释放一次请求占用。"""
|
| 644 |
+
entry = self._entries.get(proxy_key)
|
| 645 |
+
if entry is None:
|
| 646 |
+
return
|
| 647 |
+
tab = entry.tabs.get(type_name)
|
| 648 |
+
if tab is None:
|
| 649 |
+
return
|
| 650 |
+
if tab.active_requests > 0:
|
| 651 |
+
tab.active_requests -= 1
|
| 652 |
+
tab.last_used_at = time.time()
|
| 653 |
+
entry.last_used_at = tab.last_used_at
|
| 654 |
+
if tab.active_requests == 0:
|
| 655 |
+
if tab.accepting_new:
|
| 656 |
+
tab.state = "ready"
|
| 657 |
+
elif tab.frozen_until is not None:
|
| 658 |
+
tab.state = "frozen"
|
| 659 |
+
else:
|
| 660 |
+
tab.state = "draining"
|
| 661 |
+
|
| 662 |
+
def mark_tab_draining(
|
| 663 |
+
self,
|
| 664 |
+
proxy_key: ProxyKey,
|
| 665 |
+
type_name: str,
|
| 666 |
+
*,
|
| 667 |
+
frozen_until: int | None = None,
|
| 668 |
+
) -> None:
|
| 669 |
+
"""禁止 tab 接受新请求,并标记为 draining/frozen。"""
|
| 670 |
+
entry = self._entries.get(proxy_key)
|
| 671 |
+
if entry is None:
|
| 672 |
+
return
|
| 673 |
+
tab = entry.tabs.get(type_name)
|
| 674 |
+
if tab is None:
|
| 675 |
+
return
|
| 676 |
+
tab.accepting_new = False
|
| 677 |
+
tab.frozen_until = frozen_until
|
| 678 |
+
tab.last_used_at = time.time()
|
| 679 |
+
entry.last_used_at = tab.last_used_at
|
| 680 |
+
if frozen_until is not None:
|
| 681 |
+
tab.state = "frozen"
|
| 682 |
+
else:
|
| 683 |
+
tab.state = "draining"
|
| 684 |
+
|
| 685 |
+
def register_session(
|
| 686 |
+
self,
|
| 687 |
+
proxy_key: ProxyKey,
|
| 688 |
+
type_name: str,
|
| 689 |
+
session_id: str,
|
| 690 |
+
) -> None:
|
| 691 |
+
entry = self._entries.get(proxy_key)
|
| 692 |
+
if entry is None:
|
| 693 |
+
return
|
| 694 |
+
tab = entry.tabs.get(type_name)
|
| 695 |
+
if tab is None:
|
| 696 |
+
return
|
| 697 |
+
tab.sessions.add(session_id)
|
| 698 |
+
tab.last_used_at = time.time()
|
| 699 |
+
entry.last_used_at = tab.last_used_at
|
| 700 |
+
|
| 701 |
+
def unregister_session(
|
| 702 |
+
self,
|
| 703 |
+
proxy_key: ProxyKey,
|
| 704 |
+
type_name: str,
|
| 705 |
+
session_id: str,
|
| 706 |
+
) -> None:
|
| 707 |
+
entry = self._entries.get(proxy_key)
|
| 708 |
+
if entry is None:
|
| 709 |
+
return
|
| 710 |
+
tab = entry.tabs.get(type_name)
|
| 711 |
+
if tab is None:
|
| 712 |
+
return
|
| 713 |
+
tab.sessions.discard(session_id)
|
| 714 |
+
|
| 715 |
+
    async def close_tab(
        self,
        proxy_key: ProxyKey,
        type_name: str,
    ) -> ClosedTabInfo | None:
        """Close the tab of the given type; returns the sessions that must be invalidated.

        Returns None when the browser entry or the tab does not exist.
        """
        entry = self._entries.get(proxy_key)
        if entry is None:
            return None
        tab = entry.tabs.pop(type_name, None)
        if tab is None:
            return None
        try:
            await tab.page.close()
        except Exception:
            # Best effort: the page may already be gone with the browser.
            pass
        entry.last_used_at = time.time()
        logger.info(
            "[tab] closed mode=%s proxy=%s type=%s",
            "proxy" if proxy_key.use_proxy else "direct",
            proxy_key.fingerprint_id,
            type_name,
        )
        return ClosedTabInfo(
            proxy_key=proxy_key,
            type_name=type_name,
            account_id=tab.account_id,
            session_ids=list(tab.sessions),
        )
|
| 744 |
+
|
| 745 |
+
    async def close_browser(self, proxy_key: ProxyKey) -> list[ClosedTabInfo]:
        """Tear down the whole browser for *proxy_key*; returns info for every closed tab."""
        return await self._close_entry_async(proxy_key)
|
| 747 |
+
|
| 748 |
+
    async def _close_entry_async(self, proxy_key: ProxyKey) -> list[ClosedTabInfo]:
        """Tear down one browser entry: tabs, proxy forwarder, CDP connection, process.

        Returns one ClosedTabInfo per tab so callers can invalidate the bound
        sessions. Safe no-op (empty list) when the entry does not exist.
        """
        entry = self._entries.get(proxy_key)
        if entry is None:
            return []

        # Snapshot tab/session info before anything is closed.
        closed_tabs = [
            ClosedTabInfo(
                proxy_key=proxy_key,
                type_name=type_name,
                account_id=tab.account_id,
                session_ids=list(tab.sessions),
            )
            for type_name, tab in entry.tabs.items()
        ]
        for tab in list(entry.tabs.values()):
            try:
                await tab.page.close()
            except Exception:
                pass
        entry.tabs.clear()
        if entry.proxy_forwarder is not None:
            try:
                entry.proxy_forwarder.stop()
            except Exception as e:
                logger.warning("关闭本地代理转发时异常: %s", e)
        if entry.browser is not None:
            try:
                await entry.browser.close()
            except Exception as e:
                logger.warning("关闭 CDP 浏览器时异常: %s", e)
        # Terminate the Chromium process; escalate to kill on timeout.
        try:
            entry.proc.terminate()
            entry.proc.wait(timeout=8)
        except subprocess.TimeoutExpired:
            entry.proc.kill()
            entry.proc.wait(timeout=3)
        except Exception as e:
            logger.warning("关闭浏览器进程时异常: %s", e)
        self._cleanup_stderr_log(entry.stderr_path)
        # Recycle the CDP port for future browsers.
        self._available_ports.add(entry.port)
        del self._entries[proxy_key]
        logger.info(
            "[browser] closed mode=%s proxy=%s",
            "proxy" if proxy_key.use_proxy else "direct",
            proxy_key.fingerprint_id,
        )
        return closed_tabs
|
| 795 |
+
|
| 796 |
+
async def collect_idle_browsers(
|
| 797 |
+
self,
|
| 798 |
+
*,
|
| 799 |
+
idle_seconds: float,
|
| 800 |
+
resident_browser_count: int,
|
| 801 |
+
) -> list[ClosedTabInfo]:
|
| 802 |
+
"""
|
| 803 |
+
关闭空闲浏览器:
|
| 804 |
+
|
| 805 |
+
- 浏览器下所有 tab 都没有活跃请求
|
| 806 |
+
- 所有 tab 均已空闲超过 idle_seconds
|
| 807 |
+
- 当前浏览器数 > resident_browser_count
|
| 808 |
+
"""
|
| 809 |
+
if len(self._entries) <= resident_browser_count:
|
| 810 |
+
return []
|
| 811 |
+
|
| 812 |
+
now = time.time()
|
| 813 |
+
candidates: list[tuple[float, ProxyKey]] = []
|
| 814 |
+
for proxy_key, entry in self._entries.items():
|
| 815 |
+
if any(tab.active_requests > 0 for tab in entry.tabs.values()):
|
| 816 |
+
continue
|
| 817 |
+
if entry.tabs:
|
| 818 |
+
last_tab_used = max(tab.last_used_at for tab in entry.tabs.values())
|
| 819 |
+
else:
|
| 820 |
+
last_tab_used = entry.last_used_at
|
| 821 |
+
if now - last_tab_used < idle_seconds:
|
| 822 |
+
continue
|
| 823 |
+
candidates.append((last_tab_used, proxy_key))
|
| 824 |
+
|
| 825 |
+
if not candidates:
|
| 826 |
+
return []
|
| 827 |
+
|
| 828 |
+
closed: list[ClosedTabInfo] = []
|
| 829 |
+
max_close = max(0, len(self._entries) - resident_browser_count)
|
| 830 |
+
for _, proxy_key in sorted(candidates, key=lambda item: item[0])[:max_close]:
|
| 831 |
+
closed.extend(await self._close_entry_async(proxy_key))
|
| 832 |
+
return closed
|
| 833 |
+
|
| 834 |
+
async def close_all(self) -> list[ClosedTabInfo]:
|
| 835 |
+
"""关闭全部浏览器和 tab。"""
|
| 836 |
+
closed: list[ClosedTabInfo] = []
|
| 837 |
+
for proxy_key in list(self._entries.keys()):
|
| 838 |
+
closed.extend(await self._close_entry_async(proxy_key))
|
| 839 |
+
return closed
|
core/runtime/conversation_index.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""会话指纹索引:替代 sticky session,通过指纹精确匹配同一逻辑对话。
|
| 2 |
+
|
| 3 |
+
指纹 = sha256(system_prompt + first_user_message)[:16],
|
| 4 |
+
同一对话的指纹恒定,不同对话指纹不同,杜绝上下文污染。
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import time
|
| 8 |
+
from dataclasses import dataclass, field
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@dataclass
|
| 12 |
+
class ConversationEntry:
|
| 13 |
+
session_id: str
|
| 14 |
+
fingerprint: str
|
| 15 |
+
message_count: int
|
| 16 |
+
account_id: str
|
| 17 |
+
created_at: float = field(default_factory=time.time)
|
| 18 |
+
last_used_at: float = field(default_factory=time.time)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class ConversationIndex:
|
| 22 |
+
"""进程内指纹索引,不持久化。"""
|
| 23 |
+
|
| 24 |
+
def __init__(self) -> None:
|
| 25 |
+
self._by_fingerprint: dict[str, ConversationEntry] = {}
|
| 26 |
+
self._by_session_id: dict[str, ConversationEntry] = {}
|
| 27 |
+
|
| 28 |
+
def register(
|
| 29 |
+
self,
|
| 30 |
+
fingerprint: str,
|
| 31 |
+
session_id: str,
|
| 32 |
+
message_count: int,
|
| 33 |
+
account_id: str,
|
| 34 |
+
) -> None:
|
| 35 |
+
# Remove old entry for this fingerprint if exists
|
| 36 |
+
old = self._by_fingerprint.pop(fingerprint, None)
|
| 37 |
+
if old is not None:
|
| 38 |
+
self._by_session_id.pop(old.session_id, None)
|
| 39 |
+
entry = ConversationEntry(
|
| 40 |
+
session_id=session_id,
|
| 41 |
+
fingerprint=fingerprint,
|
| 42 |
+
message_count=message_count,
|
| 43 |
+
account_id=account_id,
|
| 44 |
+
)
|
| 45 |
+
self._by_fingerprint[fingerprint] = entry
|
| 46 |
+
self._by_session_id[session_id] = entry
|
| 47 |
+
|
| 48 |
+
def lookup(self, fingerprint: str) -> ConversationEntry | None:
|
| 49 |
+
entry = self._by_fingerprint.get(fingerprint)
|
| 50 |
+
if entry is not None:
|
| 51 |
+
entry.last_used_at = time.time()
|
| 52 |
+
return entry
|
| 53 |
+
|
| 54 |
+
def remove_session(self, session_id: str) -> None:
|
| 55 |
+
entry = self._by_session_id.pop(session_id, None)
|
| 56 |
+
if entry is not None:
|
| 57 |
+
self._by_fingerprint.pop(entry.fingerprint, None)
|
| 58 |
+
|
| 59 |
+
def evict_stale(self, ttl: float) -> list[str]:
|
| 60 |
+
"""Remove entries older than *ttl* seconds. Returns evicted session IDs."""
|
| 61 |
+
now = time.time()
|
| 62 |
+
stale = [
|
| 63 |
+
e.session_id
|
| 64 |
+
for e in self._by_fingerprint.values()
|
| 65 |
+
if (now - e.last_used_at) > ttl
|
| 66 |
+
]
|
| 67 |
+
for sid in stale:
|
| 68 |
+
self.remove_session(sid)
|
| 69 |
+
return stale
|
| 70 |
+
|
| 71 |
+
def __len__(self) -> int:
|
| 72 |
+
return len(self._by_fingerprint)
|
core/runtime/keys.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""运行时键类型:代理组唯一标识。"""
|
| 2 |
+
|
| 3 |
+
from typing import NamedTuple
|
| 4 |
+
|
| 5 |
+
from core.constants import TIMEZONE
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class ProxyKey(NamedTuple):
    """Uniquely identifies one proxy group (one browser process).

    Used as a hashable dictionary key by the runtime, so it stays an
    immutable value type.
    """

    proxy_host: str  # proxy host portion of the group identity
    proxy_user: str  # proxy username portion of the group identity
    fingerprint_id: str  # browser fingerprint identifier for this group
    use_proxy: bool = True  # False means a direct (no-proxy) browser
    timezone: str = TIMEZONE  # defaults to the project-wide TIMEZONE constant
|
core/runtime/local_proxy_forwarder.py
ADDED
|
@@ -0,0 +1,287 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
本地中转代理(forward proxy)。
|
| 5 |
+
|
| 6 |
+
用途:
|
| 7 |
+
- 浏览器只配置无鉴权的本地代理:127.0.0.1:<port>
|
| 8 |
+
- 本地代理再转发到“带用户名密码鉴权”的上游代理(HTTP proxy)
|
| 9 |
+
|
| 10 |
+
实现重点:
|
| 11 |
+
- 支持 CONNECT(HTTPS 隧道)——浏览器最常见的代理用法
|
| 12 |
+
- 兼容少量 HTTP 明文请求(GET http://... 这种 absolute-form)
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
from __future__ import annotations
|
| 16 |
+
|
| 17 |
+
import base64
|
| 18 |
+
import contextlib
|
| 19 |
+
import select
|
| 20 |
+
import socket
|
| 21 |
+
import socketserver
|
| 22 |
+
import threading
|
| 23 |
+
from dataclasses import dataclass
|
| 24 |
+
from typing import Callable, Optional
|
| 25 |
+
from urllib.parse import urlparse
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def _basic_proxy_auth(username: str, password: str) -> str:
|
| 29 |
+
raw = f"{username}:{password}".encode("utf-8")
|
| 30 |
+
return "Basic " + base64.b64encode(raw).decode("ascii")
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def _recv_until(
|
| 34 |
+
sock: socket.socket, marker: bytes, max_bytes: int = 256 * 1024
|
| 35 |
+
) -> bytes:
|
| 36 |
+
data = bytearray()
|
| 37 |
+
while marker not in data:
|
| 38 |
+
chunk = sock.recv(4096)
|
| 39 |
+
if not chunk:
|
| 40 |
+
break
|
| 41 |
+
data += chunk
|
| 42 |
+
if len(data) > max_bytes:
|
| 43 |
+
break
|
| 44 |
+
return bytes(data)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def _split_headers(data: bytes) -> tuple[bytes, bytes]:
|
| 48 |
+
idx = data.find(b"\r\n\r\n")
|
| 49 |
+
if idx < 0:
|
| 50 |
+
return data, b""
|
| 51 |
+
return data[: idx + 4], data[idx + 4 :]
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def _parse_first_line(header_bytes: bytes) -> tuple[str, str, str]:
|
| 55 |
+
# e.g. "CONNECT example.com:443 HTTP/1.1"
|
| 56 |
+
first = header_bytes.split(b"\r\n", 1)[0].decode("latin-1", errors="replace")
|
| 57 |
+
parts = first.strip().split()
|
| 58 |
+
if len(parts) >= 3:
|
| 59 |
+
return parts[0].upper(), parts[1], parts[2]
|
| 60 |
+
if len(parts) == 2:
|
| 61 |
+
return parts[0].upper(), parts[1], "HTTP/1.1"
|
| 62 |
+
return "GET", "/", "HTTP/1.1"
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def _remove_hop_by_hop_headers(header_bytes: bytes) -> bytes:
|
| 66 |
+
# 仅做最小处理:去掉 Proxy-Authorization / Proxy-Connection,避免重复/冲突
|
| 67 |
+
lines = header_bytes.split(b"\r\n")
|
| 68 |
+
if not lines:
|
| 69 |
+
return header_bytes
|
| 70 |
+
out = [lines[0]]
|
| 71 |
+
for line in lines[1:]:
|
| 72 |
+
lower = line.lower()
|
| 73 |
+
if lower.startswith(b"proxy-authorization:"):
|
| 74 |
+
continue
|
| 75 |
+
if lower.startswith(b"proxy-connection:"):
|
| 76 |
+
continue
|
| 77 |
+
out.append(line)
|
| 78 |
+
return b"\r\n".join(out)
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def _relay_bidi(a: socket.socket, b: socket.socket, stop_evt: threading.Event) -> None:
    """Pump bytes in both directions between *a* and *b* until EOF, a send
    error, or *stop_evt* is set. Both sockets are switched to non-blocking
    mode and are always closed on exit.
    """
    a.setblocking(False)
    b.setblocking(False)
    socks = [a, b]
    try:
        while not stop_evt.is_set():
            # 0.5s select timeout so stop_evt is re-checked even when idle.
            r, _, _ = select.select(socks, [], [], 0.5)
            if not r:
                continue
            for s in r:
                try:
                    data = s.recv(65536)
                except BlockingIOError:
                    # Readiness can be spurious on non-blocking sockets; retry.
                    continue
                if not data:
                    # EOF on either side ends the whole relay.
                    stop_evt.set()
                    break
                other = b if s is a else a
                try:
                    other.sendall(data)
                except OSError:
                    stop_evt.set()
                    break
    finally:
        with contextlib.suppress(Exception):
            a.close()
        with contextlib.suppress(Exception):
            b.close()
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
class _ThreadingTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    """TCP server that handles each connection in its own daemon thread."""

    allow_reuse_address = True  # allow quick rebinding of the listen port
    daemon_threads = True  # handler threads must not block interpreter exit
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
@dataclass(frozen=True)
class UpstreamProxy:
    """Authenticated upstream HTTP proxy endpoint (immutable value object)."""

    host: str  # upstream proxy host
    port: int  # upstream proxy port
    username: str  # Basic-auth username
    password: str  # Basic-auth password

    @property
    def auth_header_value(self) -> str:
        """Value for the Proxy-Authorization header ("Basic <base64(user:pass)>")."""
        return _basic_proxy_auth(self.username, self.password)
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def parse_proxy_server(proxy_server: str) -> tuple[str, int]:
    """
    Parse a proxy server string into ``(host, port)``.

    支持:
    - http://host:port
    - host:port

    Raises:
        ValueError: when the string is empty, or host/port cannot be parsed
            (including non-numeric or out-of-range ports, which ``urlparse``
            would otherwise surface as a raw, unhelpful ValueError).
    """
    s = (proxy_server or "").strip()
    if not s:
        raise ValueError("proxy_server 为空")
    if "://" not in s:
        # urlparse only recognizes host:port inside a netloc, so add a scheme.
        s = "http://" + s
    u = urlparse(s)
    try:
        host = u.hostname
        # Accessing .port raises ValueError for non-numeric / out-of-range
        # ports; normalize that into the same friendly error below.
        port = u.port
    except ValueError as exc:
        raise ValueError(f"无法解析 proxy_server: {proxy_server!r}") from exc
    if not host or not port:
        raise ValueError(f"无法解析 proxy_server: {proxy_server!r}")
    return host, int(port)
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
class LocalProxyForwarder:
    """
    Run a local, unauthenticated HTTP proxy and forward requests/tunnels to an
    upstream proxy that requires Basic authentication.
    """

    def __init__(
        self,
        upstream: UpstreamProxy,
        *,
        listen_host: str = "127.0.0.1",
        listen_port: int = 0,
        on_log: Optional[Callable[[str], None]] = None,
    ) -> None:
        """Store configuration only; the listener starts in :meth:`start`.

        listen_port=0 lets the OS pick an ephemeral port (read it via ``port``).
        """
        self._upstream = upstream
        self._listen_host = listen_host
        self._listen_port = listen_port
        self._on_log = on_log

        self._server: _ThreadingTCPServer | None = None
        self._thread: threading.Thread | None = None

    @property
    def port(self) -> int:
        # Actual bound port; meaningful after start() when listen_port was 0.
        if not self._server:
            raise RuntimeError("forwarder 尚未启动")
        return int(self._server.server_address[1])

    @property
    def proxy_url(self) -> str:
        # URL a browser should use as its proxy setting.
        return f"http://{self._listen_host}:{self.port}"

    def _log(self, msg: str) -> None:
        # Best-effort logging; a failing log callback must never break proxying.
        if self._on_log:
            try:
                self._on_log(msg)
            except Exception:
                pass

    def start(self) -> "LocalProxyForwarder":
        """Start the listener thread; idempotent (returns self)."""
        if self._server is not None:
            return self

        upstream = self._upstream
        parent = self

        class Handler(socketserver.BaseRequestHandler):
            # One instance per incoming connection (threaded by the server).
            def handle(self) -> None:
                client = self.request
                try:
                    data = _recv_until(client, b"\r\n\r\n")
                    if not data:
                        return
                    header, rest = _split_headers(data)
                    method, target, _ver = _parse_first_line(header)

                    upstream_sock = socket.create_connection(
                        (upstream.host, upstream.port), timeout=15
                    )
                    upstream_sock.settimeout(20)

                    if method == "CONNECT":
                        # Establish a tunnel to *target* through the upstream proxy.
                        connect_req = (
                            f"CONNECT {target} HTTP/1.1\r\n"
                            f"Host: {target}\r\n"
                            f"Proxy-Authorization: {upstream.auth_header_value}\r\n"
                            f"Proxy-Connection: keep-alive\r\n"
                            f"Connection: keep-alive\r\n"
                            f"\r\n"
                        ).encode("latin-1", errors="ignore")
                        upstream_sock.sendall(connect_req)
                        upstream_resp = _recv_until(upstream_sock, b"\r\n\r\n")
                        if not upstream_resp:
                            client.sendall(b"HTTP/1.1 502 Bad Gateway\r\n\r\n")
                            return
                        # Relay the upstream response straight back to the
                        # browser (normally "200 Connection Established").
                        client.sendall(upstream_resp)

                        # A CONNECT usually has no body after the headers, but
                        # forward any buffered leftover bytes to the upstream.
                        if rest:
                            upstream_sock.sendall(rest)

                        stop_evt = threading.Event()
                        _relay_bidi(client, upstream_sock, stop_evt)
                        return

                    # Non-CONNECT: forward the request to the upstream proxy
                    # (absolute-form request). Minimal implementation, mainly
                    # to cover occasional plain http:// requests.
                    filtered = _remove_hop_by_hop_headers(header)
                    # Insert Proxy-Authorization just before the blank line.
                    parts = filtered.split(b"\r\n")
                    out_lines = [parts[0]]
                    inserted = False
                    for line in parts[1:]:
                        if not inserted and line == b"":
                            out_lines.append(
                                f"Proxy-Authorization: {upstream.auth_header_value}".encode(
                                    "latin-1", errors="ignore"
                                )
                            )
                            inserted = True
                        out_lines.append(line)
                    new_header = b"\r\n".join(out_lines)
                    upstream_sock.sendall(new_header)
                    if rest:
                        upstream_sock.sendall(rest)

                    # One-way copy of the response back to the client until EOF.
                    while True:
                        chunk = upstream_sock.recv(65536)
                        if not chunk:
                            break
                        client.sendall(chunk)
                except Exception as e:
                    parent._log(f"[proxy] handler error: {e}")
                    with contextlib.suppress(Exception):
                        client.sendall(b"HTTP/1.1 502 Bad Gateway\r\n\r\n")
                finally:
                    with contextlib.suppress(Exception):
                        client.close()

        self._server = _ThreadingTCPServer(
            (self._listen_host, self._listen_port), Handler
        )
        self._thread = threading.Thread(target=self._server.serve_forever, daemon=True)
        self._thread.start()
        return self

    def stop(self) -> None:
        """Shut down the listener and release the socket; idempotent."""
        if self._server is None:
            return
        with contextlib.suppress(Exception):
            self._server.shutdown()
        with contextlib.suppress(Exception):
            self._server.server_close()
        self._server = None
        self._thread = None

    def __enter__(self) -> "LocalProxyForwarder":
        return self.start()

    def __exit__(self, exc_type, exc, tb) -> None:
        self.stop()
|
core/runtime/session_cache.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
会话缓存:session_id 全局唯一,映射到 (proxy_key, type, account_id)。
|
| 3 |
+
|
| 4 |
+
当前架构下 session 绑定到某个 tab/account:
|
| 5 |
+
|
| 6 |
+
- tab 被关闭或切号时,需要批量失效该 tab 下的 session
|
| 7 |
+
- 单个 session 失效时,需要从缓存中移除,后续按完整历史重建
|
| 8 |
+
- 超过 TTL 的 session 在维护循环中被自动清理
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from dataclasses import dataclass
|
| 12 |
+
import time
|
| 13 |
+
|
| 14 |
+
from core.runtime.keys import ProxyKey
|
| 15 |
+
|
| 16 |
+
# Sessions older than this are eligible for eviction during maintenance.
|
| 17 |
+
SESSION_TTL_SECONDS = 1800.0 # 30 minutes
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@dataclass
class SessionEntry:
    """One session record: lets a session_id be mapped back to its browser tab and account."""

    proxy_key: ProxyKey  # which browser (proxy group) owns the session
    type_name: str  # tab type within that browser
    account_id: str  # account the session was created under
    last_used_at: float  # unix timestamp of last use; drives TTL eviction
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class SessionCache:
    """In-process session cache; not persisted and not shared across processes."""

    def __init__(self) -> None:
        self._store: dict[str, SessionEntry] = {}

    def get(self, session_id: str) -> SessionEntry | None:
        """Return the entry for *session_id*, or None when unknown."""
        return self._store.get(session_id)

    def put(
        self,
        session_id: str,
        proxy_key: ProxyKey,
        type_name: str,
        account_id: str,
    ) -> None:
        """Insert or overwrite the entry for *session_id* with a fresh timestamp."""
        entry = SessionEntry(
            proxy_key=proxy_key,
            type_name=type_name,
            account_id=account_id,
            last_used_at=time.time(),
        )
        self._store[session_id] = entry

    def touch(self, session_id: str) -> None:
        """Refresh the last-used timestamp; no-op for unknown sessions."""
        entry = self._store.get(session_id)
        if entry is None:
            return
        entry.last_used_at = time.time()

    def delete(self, session_id: str) -> None:
        """Remove one session; silently ignores unknown IDs."""
        self._store.pop(session_id, None)

    def delete_many(self, session_ids: list[str] | set[str]) -> None:
        """Remove a batch of sessions; unknown IDs are ignored."""
        for sid in session_ids:
            self._store.pop(sid, None)

    def evict_stale(self, ttl: float = SESSION_TTL_SECONDS) -> list[str]:
        """Remove sessions idle for more than *ttl* seconds; return the evicted IDs."""
        cutoff = time.time() - ttl
        stale = [
            sid
            for sid, entry in self._store.items()
            if entry.last_used_at < cutoff
        ]
        for sid in stale:
            self._store.pop(sid, None)
        return stale

    def __contains__(self, session_id: str) -> bool:
        return session_id in self._store

    def __len__(self) -> int:
        return len(self._store)
|
core/static/config.html
ADDED
|
@@ -0,0 +1,1698 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<!DOCTYPE html>
|
| 2 |
+
<html lang="en">
|
| 3 |
+
<head>
|
| 4 |
+
<meta charset="UTF-8" />
|
| 5 |
+
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
| 6 |
+
<title>Web2API configuration</title>
|
| 7 |
+
<style>
|
| 8 |
+
:root {
|
| 9 |
+
--bg: #120f0d;
|
| 10 |
+
--bg-deep: #080706;
|
| 11 |
+
--panel: rgba(28, 23, 20, 0.88);
|
| 12 |
+
--panel-strong: rgba(38, 32, 27, 0.96);
|
| 13 |
+
--line: rgba(247, 239, 230, 0.12);
|
| 14 |
+
--line-strong: rgba(247, 239, 230, 0.18);
|
| 15 |
+
--text: #f7efe6;
|
| 16 |
+
--muted: #b7aa98;
|
| 17 |
+
--accent: #efd9bc;
|
| 18 |
+
--accent-strong: #ddb98a;
|
| 19 |
+
--success-bg: rgba(22, 101, 52, 0.26);
|
| 20 |
+
--success-text: #c8f2d6;
|
| 21 |
+
--danger-bg: rgba(127, 29, 29, 0.3);
|
| 22 |
+
--danger-text: #fecaca;
|
| 23 |
+
--warning-bg: rgba(124, 45, 18, 0.32);
|
| 24 |
+
--warning-text: #fed7aa;
|
| 25 |
+
--shadow: 0 32px 100px rgba(0, 0, 0, 0.42);
|
| 26 |
+
--radius: 28px;
|
| 27 |
+
}
|
| 28 |
+
|
| 29 |
+
* {
|
| 30 |
+
box-sizing: border-box;
|
| 31 |
+
}
|
| 32 |
+
|
| 33 |
+
body {
|
| 34 |
+
margin: 0;
|
| 35 |
+
color: var(--text);
|
| 36 |
+
font-family: "Avenir Next", "Segoe UI", ui-sans-serif, system-ui, sans-serif;
|
| 37 |
+
background:
|
| 38 |
+
radial-gradient(circle at top left, rgba(239, 217, 188, 0.14), transparent 28%),
|
| 39 |
+
radial-gradient(circle at 85% 8%, rgba(163, 120, 76, 0.16), transparent 20%),
|
| 40 |
+
linear-gradient(180deg, #171210 0%, var(--bg) 38%, var(--bg-deep) 100%);
|
| 41 |
+
}
|
| 42 |
+
|
| 43 |
+
body::before {
|
| 44 |
+
content: "";
|
| 45 |
+
position: fixed;
|
| 46 |
+
inset: 0;
|
| 47 |
+
pointer-events: none;
|
| 48 |
+
opacity: 0.12;
|
| 49 |
+
background-image: linear-gradient(rgba(255, 255, 255, 0.03) 1px, transparent 1px);
|
| 50 |
+
background-size: 100% 3px;
|
| 51 |
+
}
|
| 52 |
+
|
| 53 |
+
code,
|
| 54 |
+
textarea,
|
| 55 |
+
pre {
|
| 56 |
+
font-family: "SFMono-Regular", "JetBrains Mono", "Cascadia Code", ui-monospace, monospace;
|
| 57 |
+
}
|
| 58 |
+
|
| 59 |
+
h1,
|
| 60 |
+
h2,
|
| 61 |
+
h3 {
|
| 62 |
+
font-family: "Iowan Old Style", "Palatino Linotype", "Book Antiqua", Georgia, serif;
|
| 63 |
+
letter-spacing: -0.03em;
|
| 64 |
+
}
|
| 65 |
+
|
| 66 |
+
.shell {
|
| 67 |
+
width: min(1280px, calc(100vw - 28px));
|
| 68 |
+
margin: 0 auto;
|
| 69 |
+
padding: 22px 0 44px;
|
| 70 |
+
}
|
| 71 |
+
|
| 72 |
+
.panel {
|
| 73 |
+
position: relative;
|
| 74 |
+
overflow: hidden;
|
| 75 |
+
border: 1px solid var(--line);
|
| 76 |
+
border-radius: var(--radius);
|
| 77 |
+
background: linear-gradient(180deg, rgba(41, 34, 30, 0.94), rgba(24, 20, 17, 0.9));
|
| 78 |
+
box-shadow: var(--shadow);
|
| 79 |
+
backdrop-filter: blur(18px);
|
| 80 |
+
}
|
| 81 |
+
|
| 82 |
+
.panel::after {
|
| 83 |
+
content: "";
|
| 84 |
+
position: absolute;
|
| 85 |
+
inset: auto -10% 0;
|
| 86 |
+
height: 1px;
|
| 87 |
+
background: linear-gradient(90deg, transparent, rgba(247, 239, 230, 0.22), transparent);
|
| 88 |
+
}
|
| 89 |
+
|
| 90 |
+
.hero {
|
| 91 |
+
display: flex;
|
| 92 |
+
flex-wrap: wrap;
|
| 93 |
+
align-items: flex-start;
|
| 94 |
+
justify-content: space-between;
|
| 95 |
+
gap: 18px;
|
| 96 |
+
padding: 28px;
|
| 97 |
+
}
|
| 98 |
+
|
| 99 |
+
.hero-copy {
|
| 100 |
+
max-width: 720px;
|
| 101 |
+
}
|
| 102 |
+
|
| 103 |
+
.eyebrow {
|
| 104 |
+
display: inline-flex;
|
| 105 |
+
align-items: center;
|
| 106 |
+
gap: 8px;
|
| 107 |
+
padding: 8px 14px;
|
| 108 |
+
border-radius: 999px;
|
| 109 |
+
border: 1px solid rgba(239, 217, 188, 0.18);
|
| 110 |
+
background: rgba(239, 217, 188, 0.08);
|
| 111 |
+
color: var(--accent-strong);
|
| 112 |
+
font-size: 11px;
|
| 113 |
+
font-weight: 700;
|
| 114 |
+
letter-spacing: 0.18em;
|
| 115 |
+
text-transform: uppercase;
|
| 116 |
+
}
|
| 117 |
+
|
| 118 |
+
h1 {
|
| 119 |
+
margin: 18px 0 10px;
|
| 120 |
+
font-size: clamp(2.4rem, 5vw, 4.25rem);
|
| 121 |
+
line-height: 0.96;
|
| 122 |
+
}
|
| 123 |
+
|
| 124 |
+
.hero-copy p {
|
| 125 |
+
margin: 0;
|
| 126 |
+
color: var(--muted);
|
| 127 |
+
line-height: 1.8;
|
| 128 |
+
}
|
| 129 |
+
|
| 130 |
+
.toolbar {
|
| 131 |
+
display: flex;
|
| 132 |
+
flex-wrap: wrap;
|
| 133 |
+
gap: 12px;
|
| 134 |
+
align-items: center;
|
| 135 |
+
justify-content: flex-end;
|
| 136 |
+
}
|
| 137 |
+
|
| 138 |
+
.btn {
|
| 139 |
+
display: inline-flex;
|
| 140 |
+
align-items: center;
|
| 141 |
+
justify-content: center;
|
| 142 |
+
min-height: 44px;
|
| 143 |
+
padding: 0 18px;
|
| 144 |
+
border-radius: 999px;
|
| 145 |
+
border: 1px solid var(--line);
|
| 146 |
+
background: rgba(255, 255, 255, 0.04);
|
| 147 |
+
color: var(--text);
|
| 148 |
+
font-size: 14px;
|
| 149 |
+
font-weight: 600;
|
| 150 |
+
cursor: pointer;
|
| 151 |
+
transition:
|
| 152 |
+
transform 0.16s ease,
|
| 153 |
+
border-color 0.16s ease,
|
| 154 |
+
background 0.16s ease;
|
| 155 |
+
}
|
| 156 |
+
|
| 157 |
+
.btn:hover:not(:disabled) {
|
| 158 |
+
transform: translateY(-1px);
|
| 159 |
+
border-color: var(--line-strong);
|
| 160 |
+
}
|
| 161 |
+
|
| 162 |
+
.btn:disabled {
|
| 163 |
+
opacity: 0.62;
|
| 164 |
+
cursor: wait;
|
| 165 |
+
}
|
| 166 |
+
|
| 167 |
+
.btn-primary {
|
| 168 |
+
border-color: transparent;
|
| 169 |
+
background: linear-gradient(135deg, var(--accent), var(--accent-strong));
|
| 170 |
+
color: #1a140f;
|
| 171 |
+
}
|
| 172 |
+
|
| 173 |
+
.btn-danger {
|
| 174 |
+
background: rgba(239, 68, 68, 0.2);
|
| 175 |
+
color: #fecaca;
|
| 176 |
+
border-color: rgba(248, 113, 113, 0.22);
|
| 177 |
+
}
|
| 178 |
+
|
| 179 |
+
.btn-sm {
|
| 180 |
+
min-height: 34px;
|
| 181 |
+
padding: 0 14px;
|
| 182 |
+
font-size: 12px;
|
| 183 |
+
}
|
| 184 |
+
|
| 185 |
+
.stack {
|
| 186 |
+
display: grid;
|
| 187 |
+
gap: 18px;
|
| 188 |
+
margin-top: 18px;
|
| 189 |
+
}
|
| 190 |
+
|
| 191 |
+
.top-grid {
|
| 192 |
+
display: grid;
|
| 193 |
+
grid-template-columns: minmax(0, 1.2fr) minmax(320px, 0.8fr);
|
| 194 |
+
gap: 18px;
|
| 195 |
+
}
|
| 196 |
+
|
| 197 |
+
.section {
|
| 198 |
+
padding: 24px;
|
| 199 |
+
}
|
| 200 |
+
|
| 201 |
+
.section-head {
|
| 202 |
+
display: flex;
|
| 203 |
+
flex-wrap: wrap;
|
| 204 |
+
justify-content: space-between;
|
| 205 |
+
align-items: flex-start;
|
| 206 |
+
gap: 10px;
|
| 207 |
+
margin-bottom: 18px;
|
| 208 |
+
}
|
| 209 |
+
|
| 210 |
+
.section-head h2 {
|
| 211 |
+
margin: 0;
|
| 212 |
+
font-size: 1.9rem;
|
| 213 |
+
}
|
| 214 |
+
|
| 215 |
+
.section-head p,
|
| 216 |
+
.field-note,
|
| 217 |
+
.meta,
|
| 218 |
+
.stats,
|
| 219 |
+
.empty {
|
| 220 |
+
margin: 0;
|
| 221 |
+
color: var(--muted);
|
| 222 |
+
line-height: 1.7;
|
| 223 |
+
}
|
| 224 |
+
|
| 225 |
+
.pill-row {
|
| 226 |
+
display: flex;
|
| 227 |
+
flex-wrap: wrap;
|
| 228 |
+
gap: 10px;
|
| 229 |
+
margin-bottom: 16px;
|
| 230 |
+
}
|
| 231 |
+
|
| 232 |
+
.pill {
|
| 233 |
+
display: inline-flex;
|
| 234 |
+
align-items: center;
|
| 235 |
+
gap: 8px;
|
| 236 |
+
padding: 7px 12px;
|
| 237 |
+
border-radius: 999px;
|
| 238 |
+
border: 1px solid var(--line);
|
| 239 |
+
background: rgba(255, 255, 255, 0.04);
|
| 240 |
+
font-size: 12px;
|
| 241 |
+
color: var(--muted);
|
| 242 |
+
}
|
| 243 |
+
|
| 244 |
+
.pill strong {
|
| 245 |
+
color: var(--text);
|
| 246 |
+
font-weight: 600;
|
| 247 |
+
}
|
| 248 |
+
|
| 249 |
+
.field-grid {
|
| 250 |
+
display: grid;
|
| 251 |
+
grid-template-columns: repeat(2, minmax(0, 1fr));
|
| 252 |
+
gap: 14px;
|
| 253 |
+
}
|
| 254 |
+
|
| 255 |
+
.field.span-2 {
|
| 256 |
+
grid-column: span 2;
|
| 257 |
+
}
|
| 258 |
+
|
| 259 |
+
label {
|
| 260 |
+
display: block;
|
| 261 |
+
margin-bottom: 8px;
|
| 262 |
+
color: var(--muted);
|
| 263 |
+
font-size: 12px;
|
| 264 |
+
letter-spacing: 0.08em;
|
| 265 |
+
text-transform: uppercase;
|
| 266 |
+
}
|
| 267 |
+
|
| 268 |
+
input,
|
| 269 |
+
textarea,
|
| 270 |
+
select {
|
| 271 |
+
width: 100%;
|
| 272 |
+
padding: 12px 14px;
|
| 273 |
+
border-radius: 18px;
|
| 274 |
+
border: 1px solid var(--line);
|
| 275 |
+
background: rgba(9, 8, 7, 0.42);
|
| 276 |
+
color: var(--text);
|
| 277 |
+
font-size: 14px;
|
| 278 |
+
}
|
| 279 |
+
|
| 280 |
+
textarea {
|
| 281 |
+
resize: vertical;
|
| 282 |
+
min-height: 112px;
|
| 283 |
+
}
|
| 284 |
+
|
| 285 |
+
input:focus,
|
| 286 |
+
textarea:focus,
|
| 287 |
+
select:focus {
|
| 288 |
+
outline: none;
|
| 289 |
+
border-color: rgba(239, 217, 188, 0.32);
|
| 290 |
+
box-shadow: 0 0 0 4px rgba(239, 217, 188, 0.08);
|
| 291 |
+
}
|
| 292 |
+
|
| 293 |
+
input[readonly],
|
| 294 |
+
textarea[readonly] {
|
| 295 |
+
color: var(--muted);
|
| 296 |
+
}
|
| 297 |
+
|
| 298 |
+
.inline-state {
|
| 299 |
+
display: flex;
|
| 300 |
+
align-items: center;
|
| 301 |
+
min-height: 48px;
|
| 302 |
+
padding: 0 14px;
|
| 303 |
+
border-radius: 18px;
|
| 304 |
+
border: 1px solid var(--line);
|
| 305 |
+
background: rgba(255, 255, 255, 0.03);
|
| 306 |
+
color: var(--text);
|
| 307 |
+
}
|
| 308 |
+
|
| 309 |
+
.sub-actions {
|
| 310 |
+
display: flex;
|
| 311 |
+
flex-wrap: wrap;
|
| 312 |
+
gap: 12px;
|
| 313 |
+
margin-top: 16px;
|
| 314 |
+
}
|
| 315 |
+
|
| 316 |
+
.models-list {
|
| 317 |
+
display: grid;
|
| 318 |
+
gap: 12px;
|
| 319 |
+
}
|
| 320 |
+
|
| 321 |
+
.model-card {
|
| 322 |
+
padding: 16px;
|
| 323 |
+
border-radius: 22px;
|
| 324 |
+
border: 1px solid var(--line);
|
| 325 |
+
background: rgba(255, 255, 255, 0.03);
|
| 326 |
+
}
|
| 327 |
+
|
| 328 |
+
.model-card .kicker {
|
| 329 |
+
display: inline-flex;
|
| 330 |
+
margin-bottom: 10px;
|
| 331 |
+
padding: 5px 10px;
|
| 332 |
+
border-radius: 999px;
|
| 333 |
+
background: rgba(169, 215, 184, 0.12);
|
| 334 |
+
color: #cdebd6;
|
| 335 |
+
font-size: 11px;
|
| 336 |
+
font-weight: 700;
|
| 337 |
+
letter-spacing: 0.08em;
|
| 338 |
+
text-transform: uppercase;
|
| 339 |
+
}
|
| 340 |
+
|
| 341 |
+
.model-card h3 {
|
| 342 |
+
margin: 0 0 10px;
|
| 343 |
+
font-size: 1.08rem;
|
| 344 |
+
}
|
| 345 |
+
|
| 346 |
+
.search-grid {
|
| 347 |
+
display: grid;
|
| 348 |
+
grid-template-columns: minmax(0, 1fr) auto;
|
| 349 |
+
gap: 14px;
|
| 350 |
+
align-items: end;
|
| 351 |
+
}
|
| 352 |
+
|
| 353 |
+
.stats {
|
| 354 |
+
padding-bottom: 2px;
|
| 355 |
+
white-space: nowrap;
|
| 356 |
+
}
|
| 357 |
+
|
| 358 |
+
.hint,
|
| 359 |
+
.warn {
|
| 360 |
+
padding: 14px 16px;
|
| 361 |
+
border-radius: 20px;
|
| 362 |
+
border: 1px solid var(--line);
|
| 363 |
+
line-height: 1.75;
|
| 364 |
+
}
|
| 365 |
+
|
| 366 |
+
.hint {
|
| 367 |
+
background: rgba(255, 255, 255, 0.03);
|
| 368 |
+
}
|
| 369 |
+
|
| 370 |
+
.warn {
|
| 371 |
+
margin-bottom: 14px;
|
| 372 |
+
background: var(--warning-bg);
|
| 373 |
+
border-color: rgba(251, 146, 60, 0.22);
|
| 374 |
+
color: var(--warning-text);
|
| 375 |
+
}
|
| 376 |
+
|
| 377 |
+
.group {
|
| 378 |
+
border: 1px solid var(--line);
|
| 379 |
+
border-radius: 24px;
|
| 380 |
+
background: linear-gradient(180deg, rgba(36, 30, 26, 0.96), rgba(21, 18, 15, 0.88));
|
| 381 |
+
padding: 20px;
|
| 382 |
+
}
|
| 383 |
+
|
| 384 |
+
.group + .group {
|
| 385 |
+
margin-top: 16px;
|
| 386 |
+
}
|
| 387 |
+
|
| 388 |
+
.group-header {
|
| 389 |
+
display: flex;
|
| 390 |
+
align-items: center;
|
| 391 |
+
justify-content: space-between;
|
| 392 |
+
gap: 14px;
|
| 393 |
+
cursor: pointer;
|
| 394 |
+
user-select: none;
|
| 395 |
+
}
|
| 396 |
+
|
| 397 |
+
.group-header h3 {
|
| 398 |
+
margin: 0;
|
| 399 |
+
display: flex;
|
| 400 |
+
align-items: center;
|
| 401 |
+
gap: 10px;
|
| 402 |
+
font-size: 1.18rem;
|
| 403 |
+
}
|
| 404 |
+
|
| 405 |
+
.group-header .meta {
|
| 406 |
+
margin-top: 4px;
|
| 407 |
+
}
|
| 408 |
+
|
| 409 |
+
.chevron {
|
| 410 |
+
display: inline-flex;
|
| 411 |
+
transition: transform 0.18s ease;
|
| 412 |
+
}
|
| 413 |
+
|
| 414 |
+
.group.collapsed .chevron {
|
| 415 |
+
transform: rotate(-90deg);
|
| 416 |
+
}
|
| 417 |
+
|
| 418 |
+
.group.collapsed .group-body,
|
| 419 |
+
.group.collapsed .accounts {
|
| 420 |
+
display: none;
|
| 421 |
+
}
|
| 422 |
+
|
| 423 |
+
.group-actions {
|
| 424 |
+
display: flex;
|
| 425 |
+
flex-wrap: wrap;
|
| 426 |
+
gap: 10px;
|
| 427 |
+
}
|
| 428 |
+
|
| 429 |
+
.row {
|
| 430 |
+
display: grid;
|
| 431 |
+
grid-template-columns: repeat(auto-fit, minmax(160px, 1fr));
|
| 432 |
+
gap: 12px;
|
| 433 |
+
margin-top: 16px;
|
| 434 |
+
}
|
| 435 |
+
|
| 436 |
+
.row.proxy-one-row {
|
| 437 |
+
grid-template-columns: 1fr 2fr 4fr 2fr 1.3fr 2fr;
|
| 438 |
+
}
|
| 439 |
+
|
| 440 |
+
.accounts {
|
| 441 |
+
margin-top: 18px;
|
| 442 |
+
padding-top: 18px;
|
| 443 |
+
border-top: 1px solid var(--line);
|
| 444 |
+
}
|
| 445 |
+
|
| 446 |
+
.accounts-head {
|
| 447 |
+
display: flex;
|
| 448 |
+
flex-wrap: wrap;
|
| 449 |
+
justify-content: space-between;
|
| 450 |
+
gap: 10px;
|
| 451 |
+
align-items: center;
|
| 452 |
+
margin-bottom: 12px;
|
| 453 |
+
}
|
| 454 |
+
|
| 455 |
+
.accounts-head h4 {
|
| 456 |
+
margin: 0;
|
| 457 |
+
font-size: 1rem;
|
| 458 |
+
}
|
| 459 |
+
|
| 460 |
+
.account {
|
| 461 |
+
position: relative;
|
| 462 |
+
padding: 18px;
|
| 463 |
+
border-radius: 22px;
|
| 464 |
+
border: 1px solid var(--line);
|
| 465 |
+
background: rgba(10, 9, 8, 0.42);
|
| 466 |
+
}
|
| 467 |
+
|
| 468 |
+
.account + .account {
|
| 469 |
+
margin-top: 12px;
|
| 470 |
+
}
|
| 471 |
+
|
| 472 |
+
.account-status-badge {
|
| 473 |
+
position: absolute;
|
| 474 |
+
top: 14px;
|
| 475 |
+
right: 14px;
|
| 476 |
+
display: inline-flex;
|
| 477 |
+
align-items: center;
|
| 478 |
+
padding: 5px 10px;
|
| 479 |
+
border-radius: 999px;
|
| 480 |
+
border: 1px solid transparent;
|
| 481 |
+
font-size: 12px;
|
| 482 |
+
font-weight: 700;
|
| 483 |
+
}
|
| 484 |
+
|
| 485 |
+
.account-status-badge.active {
|
| 486 |
+
background: rgba(20, 83, 45, 0.36);
|
| 487 |
+
color: #c8f2d6;
|
| 488 |
+
border-color: rgba(74, 222, 128, 0.16);
|
| 489 |
+
}
|
| 490 |
+
|
| 491 |
+
.account-status-badge.frozen {
|
| 492 |
+
background: rgba(127, 29, 29, 0.36);
|
| 493 |
+
color: #fecaca;
|
| 494 |
+
border-color: rgba(248, 113, 113, 0.2);
|
| 495 |
+
}
|
| 496 |
+
|
| 497 |
+
.account-status-badge.disabled {
|
| 498 |
+
background: rgba(63, 63, 70, 0.36);
|
| 499 |
+
color: #e4e4e7;
|
| 500 |
+
border-color: rgba(212, 212, 216, 0.12);
|
| 501 |
+
}
|
| 502 |
+
|
| 503 |
+
.account-status-badge.idle {
|
| 504 |
+
background: rgba(30, 41, 59, 0.36);
|
| 505 |
+
color: #dbe7f5;
|
| 506 |
+
border-color: rgba(148, 163, 184, 0.16);
|
| 507 |
+
}
|
| 508 |
+
|
| 509 |
+
.account-row1 {
|
| 510 |
+
display: flex;
|
| 511 |
+
flex-wrap: wrap;
|
| 512 |
+
align-items: flex-end;
|
| 513 |
+
gap: 12px;
|
| 514 |
+
margin-bottom: 12px;
|
| 515 |
+
padding-top: 26px;
|
| 516 |
+
}
|
| 517 |
+
|
| 518 |
+
.account-row1 .f {
|
| 519 |
+
min-width: 120px;
|
| 520 |
+
}
|
| 521 |
+
|
| 522 |
+
.account-row1 .f.name {
|
| 523 |
+
min-width: 120px;
|
| 524 |
+
max-width: 180px;
|
| 525 |
+
}
|
| 526 |
+
|
| 527 |
+
.account-row1 .f.freeze-time {
|
| 528 |
+
min-width: 220px;
|
| 529 |
+
}
|
| 530 |
+
|
| 531 |
+
.account-row2 .f {
|
| 532 |
+
width: 100%;
|
| 533 |
+
}
|
| 534 |
+
|
| 535 |
+
.toggle {
|
| 536 |
+
display: inline-flex;
|
| 537 |
+
align-items: center;
|
| 538 |
+
gap: 8px;
|
| 539 |
+
min-height: 44px;
|
| 540 |
+
margin: 0;
|
| 541 |
+
color: var(--text);
|
| 542 |
+
}
|
| 543 |
+
|
| 544 |
+
.toggle input {
|
| 545 |
+
width: auto;
|
| 546 |
+
margin: 0;
|
| 547 |
+
}
|
| 548 |
+
|
| 549 |
+
.pagination {
|
| 550 |
+
display: flex;
|
| 551 |
+
flex-wrap: wrap;
|
| 552 |
+
align-items: center;
|
| 553 |
+
gap: 12px;
|
| 554 |
+
margin-top: 14px;
|
| 555 |
+
}
|
| 556 |
+
|
| 557 |
+
.pagination .info {
|
| 558 |
+
color: var(--muted);
|
| 559 |
+
font-size: 13px;
|
| 560 |
+
}
|
| 561 |
+
|
| 562 |
+
.msg {
|
| 563 |
+
position: fixed;
|
| 564 |
+
top: 16px;
|
| 565 |
+
left: 50%;
|
| 566 |
+
transform: translateX(-50%);
|
| 567 |
+
z-index: 1000;
|
| 568 |
+
display: none;
|
| 569 |
+
min-width: 260px;
|
| 570 |
+
max-width: min(560px, calc(100vw - 24px));
|
| 571 |
+
padding: 13px 16px;
|
| 572 |
+
border-radius: 16px;
|
| 573 |
+
border: 1px solid transparent;
|
| 574 |
+
box-shadow: 0 18px 60px rgba(0, 0, 0, 0.35);
|
| 575 |
+
}
|
| 576 |
+
|
| 577 |
+
.msg.success {
|
| 578 |
+
background: var(--success-bg);
|
| 579 |
+
color: var(--success-text);
|
| 580 |
+
border-color: rgba(74, 222, 128, 0.12);
|
| 581 |
+
}
|
| 582 |
+
|
| 583 |
+
.msg.error {
|
| 584 |
+
background: var(--danger-bg);
|
| 585 |
+
color: var(--danger-text);
|
| 586 |
+
border-color: rgba(248, 113, 113, 0.18);
|
| 587 |
+
}
|
| 588 |
+
|
| 589 |
+
.empty {
|
| 590 |
+
padding: 18px;
|
| 591 |
+
border-radius: 18px;
|
| 592 |
+
border: 1px dashed var(--line);
|
| 593 |
+
}
|
| 594 |
+
|
| 595 |
+
@media (max-width: 1120px) {
|
| 596 |
+
.top-grid {
|
| 597 |
+
grid-template-columns: 1fr;
|
| 598 |
+
}
|
| 599 |
+
}
|
| 600 |
+
|
| 601 |
+
@media (max-width: 920px) {
|
| 602 |
+
.row.proxy-one-row {
|
| 603 |
+
grid-template-columns: repeat(2, minmax(0, 1fr));
|
| 604 |
+
}
|
| 605 |
+
}
|
| 606 |
+
|
| 607 |
+
@media (max-width: 760px) {
|
| 608 |
+
.shell {
|
| 609 |
+
width: min(100vw - 18px, 1280px);
|
| 610 |
+
padding-top: 14px;
|
| 611 |
+
}
|
| 612 |
+
|
| 613 |
+
.hero,
|
| 614 |
+
.section,
|
| 615 |
+
.group {
|
| 616 |
+
padding: 18px;
|
| 617 |
+
}
|
| 618 |
+
|
| 619 |
+
.field-grid,
|
| 620 |
+
.search-grid {
|
| 621 |
+
grid-template-columns: 1fr;
|
| 622 |
+
}
|
| 623 |
+
|
| 624 |
+
.field.span-2 {
|
| 625 |
+
grid-column: span 1;
|
| 626 |
+
}
|
| 627 |
+
|
| 628 |
+
.toolbar {
|
| 629 |
+
width: 100%;
|
| 630 |
+
justify-content: flex-start;
|
| 631 |
+
}
|
| 632 |
+
|
| 633 |
+
.btn {
|
| 634 |
+
width: 100%;
|
| 635 |
+
}
|
| 636 |
+
|
| 637 |
+
.group-header {
|
| 638 |
+
flex-direction: column;
|
| 639 |
+
align-items: flex-start;
|
| 640 |
+
}
|
| 641 |
+
}
|
| 642 |
+
</style>
|
| 643 |
+
</head>
|
| 644 |
+
<body>
|
| 645 |
+
<div id="msg" class="msg"></div>
|
| 646 |
+
|
| 647 |
+
<main class="shell">
|
| 648 |
+
<section class="panel hero">
|
| 649 |
+
<div class="hero-copy">
|
| 650 |
+
<div class="eyebrow">Admin dashboard</div>
|
| 651 |
+
<h1>Web2API configuration</h1>
|
| 652 |
+
<p>
|
| 653 |
+
Manage proxy groups, account auth JSON, global API keys, the admin password, and the
|
| 654 |
+
public model mapping used by this bridge.
|
| 655 |
+
</p>
|
| 656 |
+
</div>
|
| 657 |
+
<div class="toolbar">
|
| 658 |
+
<button type="button" class="btn btn-primary" id="addGroup">Add proxy group</button>
|
| 659 |
+
<button type="button" class="btn" id="load">Reload</button>
|
| 660 |
+
<button type="button" class="btn btn-primary" id="save">Save config</button>
|
| 661 |
+
<button type="button" class="btn" id="logout">Logout</button>
|
| 662 |
+
</div>
|
| 663 |
+
</section>
|
| 664 |
+
|
| 665 |
+
<div class="stack">
|
| 666 |
+
<section class="top-grid">
|
| 667 |
+
<article class="panel section">
|
| 668 |
+
<div class="section-head">
|
| 669 |
+
<div>
|
| 670 |
+
<h2>Global auth settings</h2>
|
| 671 |
+
</div>
|
| 672 |
+
<p>
|
| 673 |
+
Database-backed values persist across restarts and take precedence once saved here.
|
| 674 |
+
Environment variables are used as the initial fallback when the database has no value.
|
| 675 |
+
</p>
|
| 676 |
+
</div>
|
| 677 |
+
<div class="pill-row">
|
| 678 |
+
<div class="pill" id="apiKeySourceBadge">API key source</div>
|
| 679 |
+
<div class="pill" id="adminPasswordSourceBadge">Admin password source</div>
|
| 680 |
+
</div>
|
| 681 |
+
<div class="field-grid">
|
| 682 |
+
<div class="field span-2">
|
| 683 |
+
<label for="globalApiKey">API keys</label>
|
| 684 |
+
<textarea
|
| 685 |
+
id="globalApiKey"
|
| 686 |
+
placeholder="One key per line, or use comma-separated values"
|
| 687 |
+
></textarea>
|
| 688 |
+
<p class="field-note" id="apiKeyHint"></p>
|
| 689 |
+
</div>
|
| 690 |
+
<div class="field">
|
| 691 |
+
<label for="globalAdminPassword">New admin password</label>
|
| 692 |
+
<input
|
| 693 |
+
id="globalAdminPassword"
|
| 694 |
+
type="password"
|
| 695 |
+
autocomplete="new-password"
|
| 696 |
+
placeholder="Leave blank to keep the current password"
|
| 697 |
+
/>
|
| 698 |
+
<p class="field-note" id="adminPasswordHint"></p>
|
| 699 |
+
</div>
|
| 700 |
+
<div class="field">
|
| 701 |
+
<label>Dashboard status</label>
|
| 702 |
+
<div class="inline-state" id="adminPasswordState">Loading…</div>
|
| 703 |
+
<p class="field-note">Saving a new password signs out the current dashboard session.</p>
|
| 704 |
+
</div>
|
| 705 |
+
</div>
|
| 706 |
+
<div class="sub-actions">
|
| 707 |
+
<button type="button" class="btn btn-primary" id="saveAuthSettings">
|
| 708 |
+
Save auth settings
|
| 709 |
+
</button>
|
| 710 |
+
<button type="button" class="btn btn-danger" id="clearAdminPassword">
|
| 711 |
+
Disable dashboard password
|
| 712 |
+
</button>
|
| 713 |
+
</div>
|
| 714 |
+
</article>
|
| 715 |
+
|
| 716 |
+
<article class="panel section">
|
| 717 |
+
<div class="section-head">
|
| 718 |
+
<div>
|
| 719 |
+
<h2>Supported models</h2>
|
| 720 |
+
</div>
|
| 721 |
+
<p>
|
| 722 |
+
These public IDs are exposed to clients and resolved to the upstream Claude model IDs
|
| 723 |
+
shown below.
|
| 724 |
+
</p>
|
| 725 |
+
</div>
|
| 726 |
+
<div style="display:flex;align-items:center;gap:10px;margin-bottom:12px;padding:0 4px;">
|
| 727 |
+
<label style="margin:0;font-weight:500;font-size:0.92em;cursor:pointer;" for="proModelsToggle">
|
| 728 |
+
Enable Pro models (Haiku, Opus)
|
| 729 |
+
</label>
|
| 730 |
+
<input type="checkbox" id="proModelsToggle" style="width:18px;height:18px;cursor:pointer;" />
|
| 731 |
+
<span id="proModelsStatus" style="font-size:0.85em;color:#888;">Loading…</span>
|
| 732 |
+
</div>
|
| 733 |
+
<div id="modelsList" class="models-list">
|
| 734 |
+
<div class="empty">Loading model metadata…</div>
|
| 735 |
+
</div>
|
| 736 |
+
</article>
|
| 737 |
+
</section>
|
| 738 |
+
|
| 739 |
+
<section class="panel section">
|
| 740 |
+
<div class="search-grid">
|
| 741 |
+
<div>
|
| 742 |
+
<label for="search">Search groups or accounts</label>
|
| 743 |
+
<input
|
| 744 |
+
type="text"
|
| 745 |
+
id="search"
|
| 746 |
+
placeholder="Filter by proxy host, proxy user, account name, or type"
|
| 747 |
+
/>
|
| 748 |
+
</div>
|
| 749 |
+
<div class="stats" id="stats"></div>
|
| 750 |
+
</div>
|
| 751 |
+
</section>
|
| 752 |
+
|
| 753 |
+
<section class="panel section">
|
| 754 |
+
<div class="hint">
|
| 755 |
+
<strong>Network mode:</strong> When <em>Use proxy</em> is enabled, the browser goes out
|
| 756 |
+
through the configured proxy. When disabled, the browser uses this machine’s own exit IP.
|
| 757 |
+
Avoid packing many accounts into one direct-connection group, or the upstream site may
|
| 758 |
+
link them together by IP.
|
| 759 |
+
</div>
|
| 760 |
+
<div id="list"></div>
|
| 761 |
+
</section>
|
| 762 |
+
</div>
|
| 763 |
+
</main>
|
| 764 |
+
|
| 765 |
+
<script>
|
| 766 |
+
// REST endpoints used by the dashboard.
const API = '/api/config'
const STATUS_API = '/api/config/status'
const TYPES_API = '/api/types'
const AUTH_SETTINGS_API = '/api/config/auth-settings'
const MODELS_API = '/api/config/models'

// Choices offered by the per-group "accounts per page" selector.
const PAGE_SIZE_OPTIONS = [10, 20, 50, 100]
const DEFAULT_PAGE_SIZE = 20

// Mutable page-level state.
let registeredTypes = ['claude'] // provider types; refreshed from TYPES_API
let runtimeStatus = {} // accountId -> runtime info from STATUS_API
let runtimeNow = null // server clock (unix seconds) from the last status payload
let authSettings = null // last auth-settings payload, or null when unavailable
let modelMetadata = null // last model metadata payload, or null when unavailable
let config = [] // working copy of the proxy-group configuration being edited
let groupCollapsed = {} // groupKey -> whether the group panel is collapsed
let groupPage = {} // groupKey -> current 0-based page index
let groupPageSize = {} // groupKey -> accounts per page override
|
| 784 |
+
|
| 785 |
+
// Timezone options for the per-group selector. Prefer the runtime's full IANA
// list via Intl.supportedValuesOf when available; otherwise fall back to a
// curated set of common zones. Either way the list is sorted alphabetically.
const TIMEZONES = (() => {
  if (typeof Intl !== 'undefined' && Intl.supportedValuesOf) {
    try {
      return Intl.supportedValuesOf('timeZone').sort()
    } catch (_) {} // older engines may throw for 'timeZone'; use the fallback
  }
  return [
    'Africa/Cairo',
    'Africa/Johannesburg',
    'Africa/Lagos',
    'Africa/Nairobi',
    'Africa/Tunis',
    'America/Argentina/Buenos_Aires',
    'America/Bogota',
    'America/Chicago',
    'America/Denver',
    'America/Lima',
    'America/Los_Angeles',
    'America/Mexico_City',
    'America/New_York',
    'America/Santiago',
    'America/Sao_Paulo',
    'America/Toronto',
    'America/Vancouver',
    'Asia/Bangkok',
    'Asia/Colombo',
    'Asia/Dubai',
    'Asia/Ho_Chi_Minh',
    'Asia/Hong_Kong',
    'Asia/Jakarta',
    'Asia/Jerusalem',
    'Asia/Karachi',
    'Asia/Kolkata',
    'Asia/Manila',
    'Asia/Seoul',
    'Asia/Shanghai',
    'Asia/Singapore',
    'Asia/Taipei',
    'Asia/Tehran',
    'Asia/Tokyo',
    'Australia/Adelaide',
    'Australia/Melbourne',
    'Australia/Perth',
    'Australia/Sydney',
    'Europe/Amsterdam',
    'Europe/Athens',
    'Europe/Berlin',
    'Europe/Istanbul',
    'Europe/London',
    'Europe/Madrid',
    'Europe/Moscow',
    'Europe/Paris',
    'Europe/Rome',
    'Europe/Stockholm',
    'Pacific/Auckland',
    'Pacific/Fiji',
    'Pacific/Guam',
    'Pacific/Honolulu',
    'UTC',
  ].sort()
})()
|
| 846 |
+
|
| 847 |
+
// Escape a value for safe interpolation into HTML attribute values and text.
// Bug fix: the previous replacements mapped each character to itself
// ('&' -> '&', '"' -> '"', ...), making the function a no-op and leaving the
// many `value="${escapeAttr(...)}"` call sites open to markup/attribute
// injection. Replace with the standard HTML entities.
function escapeAttr(value) {
  if (value == null) return ''
  return String(value)
    .replace(/&/g, '&amp;') // must run first so later entities aren't double-escaped
    .replace(/"/g, '&quot;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
}
|
| 855 |
+
|
| 856 |
+
// Map a settings-source code to its human-readable label.
function describeSource(source) {
  switch (source) {
    case 'env':
      return 'Environment'
    case 'db':
      return 'Database'
    case 'yaml':
      return 'YAML'
    default:
      return 'Default'
  }
}
|
| 862 |
+
|
| 863 |
+
// Toggle a button between its busy and idle presentation.
// No-op when the button element is missing.
function setButtonBusy(button, busy, busyLabel, idleLabel) {
  if (!button) return
  const isBusy = Boolean(busy)
  button.disabled = isBusy
  button.textContent = isBusy ? busyLabel : idleLabel
}
|
| 868 |
+
|
| 869 |
+
// Read a Response body once, returning both the raw text and a best-effort
// JSON decode (`data` is null when the body is empty or not valid JSON).
async function parseResponse(res) {
  const text = await res.text()
  let data = null
  if (text) {
    try {
      data = JSON.parse(text)
    } catch (_) {
      data = null // non-JSON body; caller falls back to `text`
    }
  }
  return { text, data }
}
|
| 877 |
+
|
| 878 |
+
// fetch wrapper for dashboard API calls: a 401 means the admin session
// expired, so redirect to the login page and abort the caller.
async function apiFetch(url, options = {}) {
  const res = await fetch(url, options)
  if (res.status !== 401) return res
  window.location.href = '/login'
  throw new Error('Your dashboard session has expired. Please sign in again.')
}
|
| 886 |
+
|
| 887 |
+
// Flash a transient status banner in the #msg element.
// `type` selects the CSS variant ('success' by default); showing a new
// message resets the auto-hide timer stored on the function itself.
function showMsg(text, type = 'success') {
  const banner = document.getElementById('msg')
  banner.className = `msg ${type}`
  banner.textContent = text
  banner.style.display = 'block'
  clearTimeout(showMsg._timer)
  showMsg._timer = setTimeout(() => {
    banner.style.display = 'none'
  }, 4200)
}
|
| 897 |
+
|
| 898 |
+
// Refresh the registered provider types from the server.
// Best-effort: any network/parse failure keeps the current list.
async function loadTypes() {
  try {
    const res = await apiFetch(TYPES_API)
    if (!res.ok) return
    const list = await res.json()
    const usable = Array.isArray(list) && list.length > 0
    if (usable) registeredTypes = list
  } catch (_) {
    // keep the default list on failure
  }
}
|
| 906 |
+
|
| 907 |
+
// Fetch the proxy-group configuration.
// Throws with the server-provided detail (or raw body / status text) on error.
async function loadConfig() {
  const res = await apiFetch(API)
  const payload = await parseResponse(res)
  if (res.ok) return payload.data
  const detail = payload.data && payload.data.detail
  throw new Error(detail || payload.text || res.statusText)
}
|
| 913 |
+
|
| 914 |
+
// Fetch the per-account runtime status payload ({ accounts, now }).
// Throws with the server's error detail on a non-2xx response.
async function loadStatus() {
  const res = await apiFetch(STATUS_API)
  const payload = await parseResponse(res)
  if (!res.ok) throw new Error((payload.data && payload.data.detail) || payload.text || res.statusText)
  return payload.data
}
|
| 920 |
+
|
| 921 |
+
// Fetch the dashboard auth settings (API keys + admin-password state).
// Throws with the server's error detail on a non-2xx response.
async function loadAuthSettings() {
  const res = await apiFetch(AUTH_SETTINGS_API)
  const payload = await parseResponse(res)
  if (!res.ok) throw new Error((payload.data && payload.data.detail) || payload.text || res.statusText)
  return payload.data
}
|
| 927 |
+
|
| 928 |
+
// Fetch the model metadata (public-to-upstream model mapping).
// Throws with the server's error detail on a non-2xx response.
async function loadModelMetadata() {
  const res = await apiFetch(MODELS_API)
  const payload = await parseResponse(res)
  if (!res.ok) throw new Error((payload.data && payload.data.detail) || payload.text || res.statusText)
  return payload.data
}
|
| 934 |
+
|
| 935 |
+
// Pretty-print an auth object for the editable textarea.
// Anything that is not a non-null object serializes as '{}'.
function authToStr(auth) {
  const isObject = auth != null && typeof auth === 'object'
  if (!isObject) return '{}'
  try {
    return JSON.stringify(auth, null, 2)
  } catch {
    return '{}' // e.g. circular structures
  }
}
|
| 943 |
+
|
| 944 |
+
// Parse the auth textarea contents. Returns the decoded value when it is a
// JSON object/array; all other inputs (blank, invalid JSON, scalars) yield {}.
function parseAuth(text) {
  if (typeof text !== 'string') return {}
  if (text.trim() === '') return {}
  let parsed
  try {
    parsed = JSON.parse(text)
  } catch {
    return {}
  }
  const isObject = parsed !== null && typeof parsed === 'object'
  return isObject ? parsed : {}
}
|
| 953 |
+
|
| 954 |
+
// Coerce a number or numeric string into a whole-second unix timestamp.
// Returns null for anything non-numeric, non-finite, or blank.
function asUnix(value) {
  if (typeof value === 'number') {
    return Number.isFinite(value) ? Math.trunc(value) : null
  }
  if (typeof value === 'string' && value.trim() !== '') {
    const parsed = Number(value)
    if (Number.isFinite(parsed)) return Math.trunc(parsed)
  }
  return null
}
|
| 962 |
+
|
| 963 |
+
// Render a unix timestamp (number or numeric string) as a local, 24-hour
// date-time string. Missing, non-positive, or invalid values render as '—'.
function formatDateTime(value) {
  const ts = asUnix(value)
  if (ts == null || ts <= 0) return '—'
  try {
    const date = new Date(ts * 1000)
    return date.toLocaleString(undefined, { hour12: false })
  } catch (_) {
    return String(ts) // locale formatting failed; show the raw timestamp
  }
}
|
| 972 |
+
|
| 973 |
+
// Cache the per-account runtime map and server clock from a status payload.
// Falls back to an empty map / the local clock when fields are missing.
function applyRuntimeStatus(payload) {
  runtimeStatus = (payload && payload.accounts) || {}
  if (payload && typeof payload.now === 'number') {
    runtimeNow = payload.now
  } else {
    runtimeNow = Math.trunc(Date.now() / 1000)
  }
}
|
| 978 |
+
|
| 979 |
+
// Stable key used to look an account up in the runtime-status map:
// "<group fingerprint>:<account name>" (empty parts allowed).
function accountId(group, account) {
  const fingerprint = group.fingerprint_id || ''
  const name = account.name || ''
  return `${fingerprint}:${name}`
}
|
| 982 |
+
|
| 983 |
+
// Classify an account for its status pill: Disabled > Active > Frozen > Idle.
// Runtime data (when present) takes precedence over the stored unfreeze_at.
function getAccountBadge(group, account) {
  if (account.enabled === false) return { className: 'disabled', text: 'Disabled' }
  const runtime = runtimeStatus[accountId(group, account)] || null
  if (runtime && runtime.is_active) return { className: 'active', text: 'Active' }
  const unfreezeSource =
    runtime && runtime.unfreeze_at != null ? runtime.unfreeze_at : account.unfreeze_at
  const unfreezeAt = asUnix(unfreezeSource)
  const now = runtimeNow || Math.trunc(Date.now() / 1000)
  const frozen = unfreezeAt != null && unfreezeAt > now
  if (frozen) return { className: 'frozen', text: 'Frozen' }
  return { className: 'idle', text: 'Idle' }
}
|
| 997 |
+
|
| 998 |
+
// Populate the auth-settings panel from an /auth-settings payload and cache it
// in `authSettings`. A null/undefined payload disables the form and shows
// "Unavailable" badges instead.
function renderAuthSettings(data) {
  authSettings = data || null
  const apiKeyEl = document.getElementById('globalApiKey')
  const apiKeyHintEl = document.getElementById('apiKeyHint')
  const apiKeySourceBadge = document.getElementById('apiKeySourceBadge')
  const adminPasswordEl = document.getElementById('globalAdminPassword')
  const adminPasswordHintEl = document.getElementById('adminPasswordHint')
  const adminPasswordSourceBadge = document.getElementById('adminPasswordSourceBadge')
  const adminPasswordStateEl = document.getElementById('adminPasswordState')
  const clearAdminPasswordBtn = document.getElementById('clearAdminPassword')

  if (!data) {
    // Load failure: lock every input and surface the failure in hints/badges.
    apiKeyEl.value = ''
    apiKeyEl.disabled = true
    adminPasswordEl.value = ''
    adminPasswordEl.disabled = true
    clearAdminPasswordBtn.disabled = true
    apiKeyHintEl.textContent = 'Failed to load auth settings.'
    adminPasswordHintEl.textContent = 'Failed to load auth settings.'
    adminPasswordStateEl.textContent = 'Unavailable'
    apiKeySourceBadge.innerHTML = '<strong>API keys</strong> Unavailable'
    adminPasswordSourceBadge.innerHTML = '<strong>Admin password</strong> Unavailable'
    return
  }

  // API key section: echo the stored keys and show where they came from.
  apiKeyEl.value = data.api_key || ''
  apiKeyEl.disabled = false
  apiKeySourceBadge.innerHTML = `<strong>API keys</strong> ${describeSource(data.api_key_source)}`
  apiKeyHintEl.textContent = data.api_key_source === 'env'
    ? 'Currently using WEB2API_AUTH_API_KEY as the fallback value. Saving here writes to the database and takes precedence from then on.'
    : 'Accepts one key per line, or a comma-separated list.'

  // Admin-password section: the field is write-only — the stored password is
  // never echoed back; only its configured/source state is displayed.
  adminPasswordEl.value = ''
  adminPasswordEl.disabled = false
  clearAdminPasswordBtn.disabled = false
  adminPasswordSourceBadge.innerHTML = `<strong>Admin password</strong> ${describeSource(
    data.admin_password_source
  )}`
  adminPasswordHintEl.textContent = data.admin_password_source === 'env'
    ? 'Currently using WEB2API_AUTH_CONFIG_SECRET as the fallback value. Saving here writes to the database and takes precedence from then on.'
    : 'Enter a new password only when you want to rotate it.'
  adminPasswordStateEl.textContent = data.admin_password_configured
    ? 'Dashboard password is enabled.'
    : 'Dashboard password is disabled.'
}
|
| 1043 |
+
|
| 1044 |
+
// Render the "Supported models" cards from the /models metadata payload and
// cache it in `modelMetadata`. Each model_mapping entry maps a public model ID
// to the upstream Claude model ID it resolves to.
function renderModels(metadata) {
  modelMetadata = metadata || null
  const list = document.getElementById('modelsList')
  const mapping = (metadata && metadata.model_mapping) || {}
  const entries = Object.entries(mapping)
  if (!entries.length) {
    list.innerHTML = '<div class="empty">No model metadata available.</div>'
    return
  }
  // Model IDs are escaped via escapeAttr before interpolation into the markup.
  list.innerHTML = entries
    .map(
      ([publicModel, upstreamModel]) => `
      <article class="model-card">
        <div class="kicker">${
          publicModel === metadata.default_model ? 'Default model' : 'Available model'
        }</div>
        <h3><code>${escapeAttr(publicModel)}</code></h3>
        <p class="meta">Public model ID accepted by client requests.</p>
        <p class="meta"><strong>Upstream:</strong> <code>${escapeAttr(upstreamModel)}</code></p>
      </article>
    `
    )
    .join('')
}
|
| 1068 |
+
|
| 1069 |
+
// Build the DOM card for one account row inside a group.
//   account      - account object ({ name, type, auth, enabled, unfreeze_at })
//   accountIndex - the account's index within the group's full accounts array
//   group        - owning group (used for the runtime-status badge lookup)
//   onRemove     - callback(accountIndex) for the delete button
//   onChange     - callback(accountIndex, key, value[, rerender]) for edits
// Returns the detached <div class="account"> element.
function renderAccount(account, accountIndex, group, onRemove, onChange) {
  const currentType = account.type || 'claude'
  // Keep an unknown stored type selectable by prepending it to the options.
  const typeOptions = registeredTypes.includes(currentType)
    ? registeredTypes
    : [currentType].concat(registeredTypes)
  const badge = getAccountBadge(group, account)
  const div = document.createElement('div')
  div.className = 'account'
  div.dataset.accountIndex = String(accountIndex)
  div.innerHTML = `
    <div class="account-status-badge ${badge.className}">${badge.text}</div>
    <div class="account-row1">
      <div class="f name">
        <label>Account name</label>
        <input type="text" data-k="name" value="${escapeAttr(account.name)}" placeholder="claude-01" />
      </div>
      <div class="f type">
        <label>Provider type</label>
        <select data-k="type">
          ${typeOptions
            .map(
              (typeName) =>
                `<option value="${escapeAttr(typeName)}" ${
                  typeName === currentType ? 'selected' : ''
                }>${escapeAttr(typeName)}</option>`
            )
            .join('')}
        </select>
      </div>
      <div class="f enabled">
        <label>Enabled</label>
        <label class="toggle">
          <input type="checkbox" data-k="enabled" ${account.enabled !== false ? 'checked' : ''} />
          <span>Accept traffic</span>
        </label>
      </div>
      <div class="f freeze-time">
        <label>Unfreeze time</label>
        <input type="text" data-k="unfreeze_at_display" value="${escapeAttr(
          formatDateTime(account.unfreeze_at)
        )}" readonly />
      </div>
      <div class="actions">
        <button type="button" class="btn btn-danger btn-sm">Delete account</button>
      </div>
    </div>
    <div class="account-row2">
      <div class="f">
        <label>Auth JSON</label>
        <textarea data-k="auth" rows="3" placeholder='{"sessionKey": "..."}'>${escapeAttr(
          authToStr(account.auth)
        )}</textarea>
      </div>
    </div>
  `
  // Wire edit events back to the owner; 'enabled' requests a rerender so the
  // status badge refreshes immediately.
  div.querySelector('[data-k="name"]').oninput = (event) => onChange(accountIndex, 'name', event.target.value)
  div.querySelector('[data-k="type"]').onchange = (event) => onChange(accountIndex, 'type', event.target.value)
  div.querySelector('[data-k="enabled"]').onchange = (event) =>
    onChange(accountIndex, 'enabled', event.target.checked, true)
  div.querySelector('[data-k="auth"]').oninput = (event) => onChange(accountIndex, 'auth', event.target.value)
  div.querySelector('.btn-danger').onclick = () => onRemove(accountIndex)
  return div
}
|
| 1132 |
+
|
| 1133 |
+
// Stable identity for a group's UI state (collapse/pagination):
// "host|user" when either is set, otherwise a positional "idx-N" fallback.
function groupKey(group, index) {
  const host = (group.proxy_host || '').trim()
  const user = (group.proxy_user || '').trim()
  if (!host && !user) return `idx-${index}`
  return `${host}|${user}`
}
|
| 1138 |
+
|
| 1139 |
+
// Narrow the config to groups/accounts matching a case-insensitive search.
// Returns [{ group, groupIndex, accountIndices }] where accountIndices is
// null when the whole group should be shown (empty query, or only the group's
// proxy host/user matched) and an index list when specific accounts matched.
function filterConfig(cfg, search) {
  const query = (search || '').trim().toLowerCase()
  if (!query) {
    return cfg.map((group, groupIndex) => ({ group, groupIndex, accountIndices: null }))
  }
  const matches = []
  cfg.forEach((group, groupIndex) => {
    const hostHit = (group.proxy_host || '').toLowerCase().includes(query)
    const userHit = (group.proxy_user || '').toLowerCase().includes(query)
    const accounts = group.accounts || []
    const hitIndices = []
    accounts.forEach((account, i) => {
      const nameHit = (account.name || '').toLowerCase().includes(query)
      const typeHit = (account.type || '').toLowerCase().includes(query)
      if (nameHit || typeHit) hitIndices.push(i)
    })
    if (hostHit || userHit || hitIndices.length > 0) {
      matches.push({
        group,
        groupIndex,
        accountIndices: hitIndices.length > 0 ? hitIndices : null,
      })
    }
  })
  return matches
}
|
| 1166 |
+
|
| 1167 |
+
// Build the DOM panel for one proxy group: header (toggle/add/delete), proxy
// connection fields, and a paginated list of account cards.
//   group/groupIndex        - the group object and its index in `config`
//   onRemoveGroup(gi)       - delete the whole group
//   onAddAccount(gi)        - append a blank account
//   onRemoveAccount(gi, ai) - delete one account
//   onAccountChange(gi, ai, key, value[, rerender]) - account field edit
//   onGroupChange(gi, key, value[, rerender])       - group field edit
//   opts.accountIndices     - optional subset of account indices to show
//                             (search filter); null shows all accounts
//   opts.rerender()         - re-render the whole list after UI-state changes
// Returns the detached <div class="group"> element.
function renderGroup(
  group,
  groupIndex,
  onRemoveGroup,
  onAddAccount,
  onRemoveAccount,
  onAccountChange,
  onGroupChange,
  opts
) {
  // Read a group field, treating null/empty-string as "unset".
  const valueOf = (key, fallback = '') => (group[key] != null && group[key] !== '' ? group[key] : fallback)
  const useProxy = group.use_proxy !== false
  const gkey = groupKey(group, groupIndex)
  const allAccounts = group.accounts || []
  const indices = opts.accountIndices || allAccounts.map((_, i) => i)
  const totalCount = indices.length
  // Collapse + pagination state is keyed by gkey so it survives re-renders.
  const isCollapsed = groupCollapsed[gkey] === true
  const pageSize = groupPageSize[gkey] || DEFAULT_PAGE_SIZE
  const totalPages = Math.max(1, Math.ceil(totalCount / pageSize))
  const currentPage = Math.min(groupPage[gkey] || 0, totalPages - 1)
  const start = currentPage * pageSize
  const pageIndices = indices.slice(start, start + pageSize)
  const pageAccounts = pageIndices.map((i) => allAccounts[i])

  const div = document.createElement('div')
  div.className = 'group' + (isCollapsed ? ' collapsed' : '')
  div.dataset.groupIndex = String(groupIndex)
  div.innerHTML = `
    <div class="group-header" data-action="toggle">
      <div>
        <h3>
          <span class="chevron">▼</span>
          Proxy group ${groupIndex + 1}
        </h3>
        <p class="meta">${escapeAttr(
          useProxy ? group.proxy_host || 'Proxy not set' : 'Direct connection'
        )} · ${allAccounts.length} accounts</p>
      </div>
      <div class="group-actions" onclick="event.stopPropagation()">
        <button type="button" class="btn btn-sm" data-action="add-account">Add account</button>
        <button type="button" class="btn btn-danger btn-sm" data-action="remove-group">Delete group</button>
      </div>
    </div>
    <div class="group-body">
      ${
        useProxy
          ? ''
          : '<div class="warn">This group is using a direct connection. The browser will use this machine\'s own exit IP. Avoid packing many accounts into one direct group.</div>'
      }
      <div class="row proxy-one-row">
        <div>
          <label>Use proxy</label>
          <label class="toggle">
            <input type="checkbox" data-k="use_proxy" ${useProxy ? 'checked' : ''} />
            <span>Use proxy</span>
          </label>
        </div>
        <div>
          <label>Proxy host</label>
          <input type="text" data-k="proxy_host" value="${escapeAttr(valueOf('proxy_host'))}" placeholder="sg.arxlabs.io:3010" ${
            useProxy ? '' : 'disabled'
          } />
        </div>
        <div>
          <label>Proxy user</label>
          <input type="text" data-k="proxy_user" value="${escapeAttr(valueOf('proxy_user'))}" placeholder="Proxy username" ${
            useProxy ? '' : 'disabled'
          } />
        </div>
        <div>
          <label>Proxy password</label>
          <input type="text" data-k="proxy_pass" value="${escapeAttr(valueOf('proxy_pass'))}" placeholder="Proxy password" ${
            useProxy ? '' : 'disabled'
          } />
        </div>
        <div>
          <label>Fingerprint ID</label>
          <input type="text" data-k="fingerprint_id" value="${escapeAttr(valueOf('fingerprint_id'))}" placeholder="4567" />
        </div>
        <div>
          <label>Timezone</label>
          <select data-k="timezone">${(() => {
            // Default to America/Chicago; keep an unknown stored zone selectable.
            const value = valueOf('timezone') || 'America/Chicago'
            const options = TIMEZONES.includes(value) ? TIMEZONES : [value, ...TIMEZONES].sort()
            return options
              .map(
                (tz) =>
                  `<option value="${escapeAttr(tz)}" ${tz === value ? 'selected' : ''}>${escapeAttr(tz)}</option>`
              )
              .join('')
          })()}</select>
        </div>
      </div>
      <div class="accounts">
        <div class="accounts-head">
          <h4>Accounts</h4>
          <p class="meta">name / type / auth JSON</p>
        </div>
        <div class="accounts-list"></div>
        ${
          totalPages > 1
            ? `
          <div class="pagination">
            <span class="info">Page ${currentPage + 1} of ${totalPages} · ${totalCount} accounts</span>
            <select data-action="page-size">
              ${PAGE_SIZE_OPTIONS.map(
                (n) => `<option value="${n}" ${n === pageSize ? 'selected' : ''}>${n} / page</option>`
              ).join('')}
            </select>
            <button type="button" class="btn btn-sm" data-action="prev" ${
              currentPage <= 0 ? 'disabled' : ''
            }>Previous</button>
            <button type="button" class="btn btn-sm" data-action="next" ${
              currentPage >= totalPages - 1 ? 'disabled' : ''
            }>Next</button>
          </div>
        `
            : ''
        }
      </div>
    </div>
  `

  // Header click toggles collapse; the action buttons stop propagation above.
  div.querySelector('[data-action="toggle"]').onclick = () => {
    groupCollapsed[gkey] = !groupCollapsed[gkey]
    opts.rerender()
  }

  // Text inputs update state without forcing a re-render (focus is preserved).
  ;['proxy_host', 'proxy_user', 'proxy_pass', 'fingerprint_id'].forEach((key) => {
    const element = div.querySelector(`[data-k="${key}"]`)
    if (element) element.oninput = (event) => onGroupChange(groupIndex, key, event.target.value)
  })

  // The use_proxy toggle re-renders so the dependent inputs enable/disable.
  const useProxyEl = div.querySelector('[data-k="use_proxy"]')
  if (useProxyEl) {
    useProxyEl.onchange = (event) => onGroupChange(groupIndex, 'use_proxy', event.target.checked, true)
  }

  const timezoneEl = div.querySelector('[data-k="timezone"]')
  if (timezoneEl) timezoneEl.onchange = (event) => onGroupChange(groupIndex, 'timezone', event.target.value)

  div.querySelector('[data-action="add-account"]').onclick = () => onAddAccount(groupIndex)
  div.querySelector('[data-action="remove-group"]').onclick = () => onRemoveGroup(groupIndex)

  // Pagination controls exist only when totalPages > 1 (see template above).
  const pageSizeEl = div.querySelector('[data-action="page-size"]')
  if (pageSizeEl) {
    pageSizeEl.onchange = (event) => {
      groupPageSize[gkey] = Number(event.target.value)
      groupPage[gkey] = 0
      opts.rerender()
    }
  }

  const prevEl = div.querySelector('[data-action="prev"]')
  if (prevEl) {
    prevEl.onclick = () => {
      if (currentPage > 0) {
        groupPage[gkey] = currentPage - 1
        opts.rerender()
      }
    }
  }

  const nextEl = div.querySelector('[data-action="next"]')
  if (nextEl) {
    nextEl.onclick = () => {
      if (currentPage < totalPages - 1) {
        groupPage[gkey] = currentPage + 1
        opts.rerender()
      }
    }
  }

  // Render only the current page of accounts, passing each account's global
  // index so edits address the right entry in the unfiltered array.
  const list = div.querySelector('.accounts-list')
  pageAccounts.forEach((account, localIndex) => {
    const globalIndex = pageIndices[localIndex]
    list.appendChild(
      renderAccount(
        account,
        globalIndex,
        group,
        onRemoveAccount.bind(null, groupIndex),
        onAccountChange.bind(null, groupIndex)
      )
    )
  })

  return div
}
|
| 1356 |
+
|
| 1357 |
+
// Normalize the in-memory `config` into the payload shape expected by the API:
// trims string fields, parses string auth blobs into objects, drops accounts
// without a name, and drops groups left with no accounts.
function getConfigFromForm() {
  const normalizeAccount = (account) => {
    const name = (account.name || '').trim()
    if (!name) return null
    const auth = typeof account.auth === 'string' ? parseAuth(account.auth) : account.auth || {}
    const normalized = {
      name,
      type: (account.type || 'claude').trim() || 'claude',
      auth,
      enabled: account.enabled !== false,
    }
    const unfreezeAt = asUnix(account.unfreeze_at)
    if (unfreezeAt != null) normalized.unfreeze_at = unfreezeAt
    return normalized
  }

  const normalizeGroup = (group) => ({
    use_proxy: group.use_proxy !== false,
    proxy_host: (group.proxy_host || '').trim(),
    proxy_user: (group.proxy_user || '').trim(),
    proxy_pass: (group.proxy_pass || '').trim(),
    fingerprint_id: (group.fingerprint_id || '').trim(),
    timezone: (group.timezone || '').trim() || undefined,
    accounts: (group.accounts || []).map(normalizeAccount).filter(Boolean),
  })

  return (config || []).map(normalizeGroup).filter((group) => group.accounts.length > 0)
}
|
| 1384 |
+
|
| 1385 |
+
// Refresh every rendered account's status badge and unfreeze-time display in
// place (no full re-render), using the data-*-index attributes stamped onto
// the .group and .account elements to look entries up in `config`.
function updateAccountStatusBadges() {
  document.querySelectorAll('.group').forEach((groupEl) => {
    const groupIndex = Number(groupEl.dataset.groupIndex)
    const group = config[groupIndex]
    if (!group) return // stale DOM node; skip
    groupEl.querySelectorAll('.account').forEach((accountEl) => {
      const accountIndex = Number(accountEl.dataset.accountIndex)
      const account = (group.accounts || [])[accountIndex]
      if (!account) return
      const badge = getAccountBadge(group, account)
      const badgeEl = accountEl.querySelector('.account-status-badge')
      if (badgeEl) {
        badgeEl.className = `account-status-badge ${badge.className}`
        badgeEl.textContent = badge.text
      }
      const unfreezeEl = accountEl.querySelector('[data-k="unfreeze_at_display"]')
      if (unfreezeEl) {
        // Runtime status (when present) overrides the stored unfreeze_at.
        const runtime = runtimeStatus[accountId(group, account)] || null
        const value = runtime && runtime.unfreeze_at != null ? runtime.unfreeze_at : account.unfreeze_at
        unfreezeEl.value = formatDateTime(value)
      }
    })
  })
}
|
| 1409 |
+
|
| 1410 |
+
// Re-render the whole group list from `configData`.
// Deep-copies the input into the module-level `config`, applies the current
// search filter, updates the stats line, and mounts one panel per group.
// The nested handlers close over `config` and call rerender() after any
// structural change.
function render(configData) {
  // Deep copy so edits never mutate the caller's object graph.
  config = JSON.parse(JSON.stringify(configData || []))
  const searchText = (document.getElementById('search') && document.getElementById('search').value) || ''
  const filtered = filterConfig(config, searchText)
  const list = document.getElementById('list')
  list.innerHTML = ''

  const totalGroups = config.length
  const totalAccounts = config.reduce((count, group) => count + (group.accounts || []).length, 0)
  const statsEl = document.getElementById('stats')
  statsEl.textContent = searchText.trim()
    ? `${filtered.length} matching groups · ${totalGroups} total groups · ${totalAccounts} accounts`
    : `${totalGroups} groups · ${totalAccounts} accounts`

  // Full re-render from current in-memory state (keeps the active filter).
  function rerender() {
    render(config)
  }

  function onGroupChange(groupIndex, key, value, shouldRerender = false) {
    if (config[groupIndex]) config[groupIndex][key] = value
    if (shouldRerender) rerender()
  }

  // Append a blank, enabled account to the group and re-render.
  function onAddAccount(groupIndex) {
    if (!config[groupIndex].accounts) config[groupIndex].accounts = []
    config[groupIndex].accounts.push({
      name: '',
      type: 'claude',
      auth: {},
      enabled: true,
      unfreeze_at: null,
    })
    rerender()
  }

  function onRemoveAccount(groupIndex, accountIndex) {
    config[groupIndex].accounts.splice(accountIndex, 1)
    rerender()
  }

  // Field edit on one account; auth text is parsed into an object, and
  // enabled toggles force a re-render so the status badge updates.
  function onAccountChange(groupIndex, accountIndex, key, value) {
    if (!config[groupIndex].accounts[accountIndex]) return
    if (key === 'auth') config[groupIndex].accounts[accountIndex].auth = parseAuth(value)
    else config[groupIndex].accounts[accountIndex][key] = value
    if (key === 'enabled') rerender()
  }

  function onRemoveGroup(groupIndex) {
    config.splice(groupIndex, 1)
    rerender()
  }

  if (!filtered.length) {
    list.innerHTML = '<div class="empty">No groups match the current filter.</div>'
    return
  }

  filtered.forEach(({ group, groupIndex, accountIndices }) => {
    list.appendChild(
      renderGroup(group, groupIndex, onRemoveGroup, onAddAccount, onRemoveAccount, onAccountChange, onGroupChange, {
        rerender,
        accountIndices,
      })
    )
  })
  updateAccountStatusBadges()
}
|
| 1477 |
+
|
| 1478 |
+
// Reload config and runtime status in parallel, then re-render.
// A status failure degrades gracefully to an empty account map stamped with
// the local clock; a config failure still propagates to the caller.
async function refreshConfigAndStatus(showLoadedMessage = false) {
  const fallbackStatus = () => ({ accounts: {}, now: Math.trunc(Date.now() / 1000) })
  const [data, status] = await Promise.all([loadConfig(), loadStatus().catch(fallbackStatus)])
  applyRuntimeStatus(status)
  render(data)
  if (showLoadedMessage) showMsg('Configuration reloaded.')
}
|
| 1487 |
+
|
| 1488 |
+
// Persist the auth-settings form via PUT /api/config/auth-settings.
// Always sends the API keys; includes admin_password only when a new one was
// typed. A password change invalidates the session, so the page redirects to
// /login shortly after a successful rotation.
async function saveAuthSettings() {
  const button = document.getElementById('saveAuthSettings')
  if (!authSettings) {
    // Settings never loaded — nothing sensible to diff against or save.
    showMsg('Auth settings are not loaded yet.', 'error')
    return
  }
  const payload = {}
  payload.api_key = document.getElementById('globalApiKey').value
  const password = document.getElementById('globalAdminPassword').value.trim()
  if (password) payload.admin_password = password
  if (!Object.keys(payload).length) {
    showMsg('Nothing to save.')
    return
  }

  setButtonBusy(button, true, 'Saving…', 'Save auth settings')
  try {
    const res = await apiFetch(AUTH_SETTINGS_API, {
      method: 'PUT',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(payload),
    })
    const parsed = await parseResponse(res)
    if (!res.ok) {
      throw new Error((parsed.data && parsed.data.detail) || parsed.text || res.statusText)
    }
    // Re-render from the server's echoed settings (fall back to the cached copy).
    renderAuthSettings((parsed.data && parsed.data.settings) || authSettings)
    document.getElementById('globalAdminPassword').value = ''
    if (payload.admin_password) {
      // Password rotation invalidates this session; send the user to sign in.
      showMsg('Admin password updated. Please sign in again.')
      setTimeout(() => {
        window.location.href = '/login'
      }, 900)
      return
    }
    showMsg('Auth settings saved.')
  } catch (error) {
    showMsg((error && error.message) || 'Failed to save auth settings.', 'error')
  } finally {
    setButtonBusy(button, false, 'Saving…', 'Save auth settings')
  }
}
|
| 1530 |
+
|
| 1531 |
+
// Disable the dashboard password by PUT-ing an empty admin_password, guarded by
// an explicit browser confirm. On success the page redirects to the root.
async function disableDashboardPassword() {
  const disableBtn = document.getElementById('clearAdminPassword')
  if (!authSettings) return
  const accepted = window.confirm(
    'Disable the dashboard password? The config dashboard will no longer require sign-in until a new password is set.'
  )
  if (!accepted) return

  setButtonBusy(disableBtn, true, 'Disabling…', 'Disable dashboard password')
  try {
    const response = await apiFetch(AUTH_SETTINGS_API, {
      method: 'PUT',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ admin_password: '' }),
    })
    const result = await parseResponse(response)
    if (!response.ok) {
      throw new Error((result.data && result.data.detail) || result.text || response.statusText)
    }
    showMsg('Dashboard password disabled. Redirecting…')
    setTimeout(() => {
      window.location.href = '/'
    }, 900)
  } catch (error) {
    showMsg((error && error.message) || 'Failed to disable dashboard password.', 'error')
  } finally {
    setButtonBusy(disableBtn, false, 'Disabling…', 'Disable dashboard password')
  }
}
|
| 1560 |
+
|
| 1561 |
+
// "Add group" button: append a fresh proxy group containing one blank claude
// account, then re-render the whole list.
document.getElementById('addGroup').onclick = () => {
  const blankAccount = { name: '', type: 'claude', auth: {}, enabled: true, unfreeze_at: null }
  const newGroup = {
    use_proxy: true,
    proxy_host: '',
    proxy_user: '',
    proxy_pass: '',
    fingerprint_id: '',
    timezone: 'America/Chicago',
    accounts: [blankAccount],
  }
  config.push(newGroup)
  render(config)
}
|
| 1573 |
+
|
| 1574 |
+
// "Reload" button: refresh account types + config/status first, then fetch the
// auth settings and model metadata (each tolerating failure independently).
document.getElementById('load').onclick = async () => {
  try {
    await Promise.all([loadTypes(), refreshConfigAndStatus(false)])
    const extras = await Promise.all([
      loadAuthSettings().catch(() => null),
      loadModelMetadata().catch(() => null),
    ])
    renderAuthSettings(extras[0])
    renderModels(extras[1])
    showMsg('Configuration reloaded.')
  } catch (error) {
    showMsg((error && error.message) || 'Failed to reload configuration.', 'error')
  }
}
|
| 1588 |
+
|
| 1589 |
+
// "Save" button: collect the form state, reject an empty config, PUT it to the
// server, then re-sync the dashboard from the persisted state.
document.getElementById('save').onclick = async () => {
  const toSave = getConfigFromForm()
  if (toSave.length === 0) {
    showMsg('Keep at least one proxy group with at least one account.', 'error')
    return
  }
  const saveBtn = document.getElementById('save')
  setButtonBusy(saveBtn, true, 'Saving…', 'Save config')
  try {
    const response = await apiFetch(API, {
      method: 'PUT',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(toSave),
    })
    const result = await parseResponse(response)
    if (!response.ok) {
      throw new Error((result.data && result.data.detail) || result.text || response.statusText)
    }
    showMsg((result.data && result.data.message) || 'Configuration saved and applied.')
    await refreshConfigAndStatus(false)
  } catch (error) {
    showMsg((error && error.message) || 'Failed to save configuration.', 'error')
  } finally {
    setButtonBusy(saveBtn, false, 'Saving…', 'Save config')
  }
}
|
| 1615 |
+
|
| 1616 |
+
// Wire up the remaining toolbar controls.
const saveAuthBtn = document.getElementById('saveAuthSettings')
saveAuthBtn.onclick = () => void saveAuthSettings()
const clearPasswordBtn = document.getElementById('clearAdminPassword')
clearPasswordBtn.onclick = () => void disableDashboardPassword()

// Best-effort server-side logout; always return to the login page afterwards.
document.getElementById('logout').onclick = async () => {
  try {
    await fetch('/api/admin/logout', { method: 'POST' })
  } catch (_) {}
  window.location.href = '/login'
}

// Debounced search box: re-render 180 ms after the user stops typing.
let searchDebounce
document.getElementById('search').oninput = () => {
  clearTimeout(searchDebounce)
  searchDebounce = setTimeout(() => render(config), 180)
}
|
| 1631 |
+
|
| 1632 |
+
// Pro models toggle
|
| 1633 |
+
// Fetch the Pro-models flag and mirror it into the toggle and status label.
async function loadProModels() {
  try {
    const response = await apiFetch('/api/config/pro-models')
    const payload = await response.json()
    const enabled = Boolean(payload.enabled)
    const toggle = document.getElementById('proModelsToggle')
    const statusEl = document.getElementById('proModelsStatus')
    toggle.checked = enabled
    statusEl.textContent = enabled ? 'Enabled' : 'Disabled'
    statusEl.style.color = enabled ? '#22c55e' : '#888'
  } catch (_) {
    document.getElementById('proModelsStatus').textContent = 'Failed to load'
  }
}
|
| 1646 |
+
// Persist the Pro-models toggle. Kept as a function expression so `this` is the
// checkbox; on failure the checkbox is rolled back to its previous state.
document.getElementById('proModelsToggle').onchange = async function () {
  const enabled = this.checked
  const statusEl = document.getElementById('proModelsStatus')
  statusEl.textContent = 'Saving…'
  try {
    const response = await apiFetch('/api/config/pro-models', {
      method: 'PUT',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ enabled }),
    })
    const payload = await response.json()
    statusEl.textContent = payload.enabled ? 'Enabled' : 'Disabled'
    statusEl.style.color = payload.enabled ? '#22c55e' : '#888'
    showMsg(payload.enabled ? 'Pro models enabled.' : 'Pro models disabled.')
  } catch (err) {
    this.checked = !enabled
    statusEl.textContent = 'Error'
    showMsg('Failed to update Pro models setting.', 'error')
  }
}
|
| 1666 |
+
|
| 1667 |
+
// Initial page load: fetch everything in parallel and render; if the config
// itself cannot load, fall back to an empty dashboard plus an error banner.
;(async () => {
  await loadTypes()
  try {
    const results = await Promise.all([
      loadConfig(),
      loadStatus().catch(() => ({ accounts: {}, now: Math.trunc(Date.now() / 1000) })),
      loadAuthSettings().catch(() => null),
      loadModelMetadata().catch(() => null),
    ])
    const [configData, runtimeStatus, settings, metadata] = results
    // Fire-and-forget: the Pro-models flag is cosmetic and may arrive late.
    loadProModels().catch(() => null)
    applyRuntimeStatus(runtimeStatus)
    renderAuthSettings(settings)
    renderModels(metadata)
    render(configData)
  } catch (error) {
    renderAuthSettings(null)
    renderModels(null)
    render([])
    showMsg((error && error.message) || 'No configuration was loaded yet.', 'error')
  }
})()
|
| 1688 |
+
|
| 1689 |
+
// Poll runtime status every 15 s while the tab is visible; failures are ignored
// so a transient network error never surfaces in the UI.
setInterval(async () => {
  if (document.hidden) return
  try {
    const status = await loadStatus()
    applyRuntimeStatus(status)
    updateAccountStatusBadges()
  } catch (_) {}
}, 15000)
|
| 1696 |
+
</script>
|
| 1697 |
+
</body>
|
| 1698 |
+
</html>
|
core/static/index.html
ADDED
|
@@ -0,0 +1,474 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<!DOCTYPE html>
|
| 2 |
+
<html lang="en">
|
| 3 |
+
<head>
|
| 4 |
+
<meta charset="UTF-8" />
|
| 5 |
+
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
| 6 |
+
<title>Web2API</title>
|
| 7 |
+
<style>
|
| 8 |
+
:root {
|
| 9 |
+
--bg: #120f0d;
|
| 10 |
+
--bg-deep: #090705;
|
| 11 |
+
--panel: rgba(28, 23, 20, 0.84);
|
| 12 |
+
--panel-strong: rgba(37, 31, 27, 0.94);
|
| 13 |
+
--line: rgba(247, 239, 230, 0.12);
|
| 14 |
+
--text: #f7efe6;
|
| 15 |
+
--muted: #b8ab99;
|
| 16 |
+
--accent: #e5c49a;
|
| 17 |
+
--accent-strong: #f2ddc3;
|
| 18 |
+
--success: #a9d7b8;
|
| 19 |
+
--shadow: 0 32px 100px rgba(0, 0, 0, 0.42);
|
| 20 |
+
--radius: 28px;
|
| 21 |
+
}
|
| 22 |
+
|
| 23 |
+
* {
|
| 24 |
+
box-sizing: border-box;
|
| 25 |
+
}
|
| 26 |
+
|
| 27 |
+
body {
|
| 28 |
+
margin: 0;
|
| 29 |
+
min-height: 100vh;
|
| 30 |
+
color: var(--text);
|
| 31 |
+
font-family: "Avenir Next", "Segoe UI", ui-sans-serif, system-ui, sans-serif;
|
| 32 |
+
background:
|
| 33 |
+
radial-gradient(circle at top left, rgba(229, 196, 154, 0.16), transparent 32%),
|
| 34 |
+
radial-gradient(circle at 85% 10%, rgba(163, 120, 76, 0.18), transparent 22%),
|
| 35 |
+
linear-gradient(180deg, #171210 0%, var(--bg) 42%, var(--bg-deep) 100%);
|
| 36 |
+
}
|
| 37 |
+
|
| 38 |
+
body::before {
|
| 39 |
+
content: "";
|
| 40 |
+
position: fixed;
|
| 41 |
+
inset: 0;
|
| 42 |
+
pointer-events: none;
|
| 43 |
+
opacity: 0.18;
|
| 44 |
+
background-image: linear-gradient(rgba(255, 255, 255, 0.03) 1px, transparent 1px);
|
| 45 |
+
background-size: 100% 3px;
|
| 46 |
+
mix-blend-mode: soft-light;
|
| 47 |
+
}
|
| 48 |
+
|
| 49 |
+
a {
|
| 50 |
+
color: inherit;
|
| 51 |
+
text-decoration: none;
|
| 52 |
+
}
|
| 53 |
+
|
| 54 |
+
code,
|
| 55 |
+
pre {
|
| 56 |
+
font-family: "SFMono-Regular", "JetBrains Mono", "Cascadia Code", ui-monospace, monospace;
|
| 57 |
+
}
|
| 58 |
+
|
| 59 |
+
.shell {
|
| 60 |
+
width: min(1120px, calc(100vw - 32px));
|
| 61 |
+
margin: 0 auto;
|
| 62 |
+
padding: 28px 0 48px;
|
| 63 |
+
}
|
| 64 |
+
|
| 65 |
+
.panel {
|
| 66 |
+
position: relative;
|
| 67 |
+
overflow: hidden;
|
| 68 |
+
background: linear-gradient(180deg, rgba(43, 36, 31, 0.88), rgba(26, 21, 18, 0.9));
|
| 69 |
+
border: 1px solid var(--line);
|
| 70 |
+
border-radius: var(--radius);
|
| 71 |
+
box-shadow: var(--shadow);
|
| 72 |
+
backdrop-filter: blur(18px);
|
| 73 |
+
}
|
| 74 |
+
|
| 75 |
+
.panel::after {
|
| 76 |
+
content: "";
|
| 77 |
+
position: absolute;
|
| 78 |
+
inset: auto -10% 0;
|
| 79 |
+
height: 1px;
|
| 80 |
+
background: linear-gradient(90deg, transparent, rgba(242, 221, 195, 0.22), transparent);
|
| 81 |
+
}
|
| 82 |
+
|
| 83 |
+
.hero {
|
| 84 |
+
padding: 34px;
|
| 85 |
+
}
|
| 86 |
+
|
| 87 |
+
.eyebrow {
|
| 88 |
+
display: inline-flex;
|
| 89 |
+
align-items: center;
|
| 90 |
+
gap: 10px;
|
| 91 |
+
padding: 8px 14px;
|
| 92 |
+
border-radius: 999px;
|
| 93 |
+
border: 1px solid rgba(229, 196, 154, 0.2);
|
| 94 |
+
background: rgba(229, 196, 154, 0.08);
|
| 95 |
+
color: var(--accent);
|
| 96 |
+
font-size: 11px;
|
| 97 |
+
letter-spacing: 0.18em;
|
| 98 |
+
font-weight: 700;
|
| 99 |
+
text-transform: uppercase;
|
| 100 |
+
}
|
| 101 |
+
|
| 102 |
+
h1,
|
| 103 |
+
h2,
|
| 104 |
+
h3 {
|
| 105 |
+
font-family: "Iowan Old Style", "Palatino Linotype", "Book Antiqua", Georgia, serif;
|
| 106 |
+
letter-spacing: -0.03em;
|
| 107 |
+
}
|
| 108 |
+
|
| 109 |
+
h1 {
|
| 110 |
+
margin: 18px 0 14px;
|
| 111 |
+
max-width: 780px;
|
| 112 |
+
font-size: clamp(2.8rem, 6vw, 5rem);
|
| 113 |
+
line-height: 0.96;
|
| 114 |
+
}
|
| 115 |
+
|
| 116 |
+
.lede {
|
| 117 |
+
max-width: 720px;
|
| 118 |
+
margin: 0;
|
| 119 |
+
color: var(--muted);
|
| 120 |
+
font-size: 1.02rem;
|
| 121 |
+
line-height: 1.8;
|
| 122 |
+
}
|
| 123 |
+
|
| 124 |
+
.hero-actions {
|
| 125 |
+
display: flex;
|
| 126 |
+
flex-wrap: wrap;
|
| 127 |
+
gap: 12px;
|
| 128 |
+
margin-top: 28px;
|
| 129 |
+
}
|
| 130 |
+
|
| 131 |
+
.button {
|
| 132 |
+
display: inline-flex;
|
| 133 |
+
align-items: center;
|
| 134 |
+
justify-content: center;
|
| 135 |
+
min-width: 148px;
|
| 136 |
+
padding: 12px 18px;
|
| 137 |
+
border-radius: 999px;
|
| 138 |
+
border: 1px solid var(--line);
|
| 139 |
+
background: rgba(250, 245, 238, 0.04);
|
| 140 |
+
color: var(--text);
|
| 141 |
+
font-size: 14px;
|
| 142 |
+
font-weight: 600;
|
| 143 |
+
transition:
|
| 144 |
+
transform 0.18s ease,
|
| 145 |
+
border-color 0.18s ease,
|
| 146 |
+
background 0.18s ease;
|
| 147 |
+
}
|
| 148 |
+
|
| 149 |
+
.button:hover {
|
| 150 |
+
transform: translateY(-1px);
|
| 151 |
+
border-color: rgba(242, 221, 195, 0.28);
|
| 152 |
+
}
|
| 153 |
+
|
| 154 |
+
.button.primary {
|
| 155 |
+
background: linear-gradient(135deg, #f2ddc3, #ddbb8e);
|
| 156 |
+
color: #1a140f;
|
| 157 |
+
border-color: transparent;
|
| 158 |
+
}
|
| 159 |
+
|
| 160 |
+
.hero-stats {
|
| 161 |
+
display: grid;
|
| 162 |
+
grid-template-columns: repeat(auto-fit, minmax(180px, 1fr));
|
| 163 |
+
gap: 14px;
|
| 164 |
+
margin-top: 28px;
|
| 165 |
+
}
|
| 166 |
+
|
| 167 |
+
.stat {
|
| 168 |
+
padding: 18px;
|
| 169 |
+
border-radius: 20px;
|
| 170 |
+
border: 1px solid var(--line);
|
| 171 |
+
background: rgba(250, 245, 238, 0.03);
|
| 172 |
+
}
|
| 173 |
+
|
| 174 |
+
.stat-label {
|
| 175 |
+
display: block;
|
| 176 |
+
margin-bottom: 8px;
|
| 177 |
+
color: var(--muted);
|
| 178 |
+
font-size: 12px;
|
| 179 |
+
letter-spacing: 0.08em;
|
| 180 |
+
text-transform: uppercase;
|
| 181 |
+
}
|
| 182 |
+
|
| 183 |
+
.stat strong {
|
| 184 |
+
display: block;
|
| 185 |
+
font-size: 1.05rem;
|
| 186 |
+
}
|
| 187 |
+
|
| 188 |
+
.stack {
|
| 189 |
+
display: grid;
|
| 190 |
+
gap: 18px;
|
| 191 |
+
margin-top: 18px;
|
| 192 |
+
}
|
| 193 |
+
|
| 194 |
+
.section {
|
| 195 |
+
padding: 28px 30px;
|
| 196 |
+
}
|
| 197 |
+
|
| 198 |
+
.section-head {
|
| 199 |
+
display: flex;
|
| 200 |
+
flex-wrap: wrap;
|
| 201 |
+
align-items: end;
|
| 202 |
+
justify-content: space-between;
|
| 203 |
+
gap: 12px;
|
| 204 |
+
margin-bottom: 18px;
|
| 205 |
+
}
|
| 206 |
+
|
| 207 |
+
.section-head h2 {
|
| 208 |
+
margin: 0;
|
| 209 |
+
font-size: 2rem;
|
| 210 |
+
}
|
| 211 |
+
|
| 212 |
+
.section-head p {
|
| 213 |
+
margin: 0;
|
| 214 |
+
max-width: 560px;
|
| 215 |
+
color: var(--muted);
|
| 216 |
+
line-height: 1.7;
|
| 217 |
+
}
|
| 218 |
+
|
| 219 |
+
.model-grid {
|
| 220 |
+
display: grid;
|
| 221 |
+
grid-template-columns: repeat(auto-fit, minmax(240px, 1fr));
|
| 222 |
+
gap: 14px;
|
| 223 |
+
}
|
| 224 |
+
|
| 225 |
+
.model-card {
|
| 226 |
+
padding: 18px;
|
| 227 |
+
border-radius: 20px;
|
| 228 |
+
border: 1px solid var(--line);
|
| 229 |
+
background: rgba(250, 245, 238, 0.03);
|
| 230 |
+
}
|
| 231 |
+
|
| 232 |
+
.model-card .kicker {
|
| 233 |
+
display: inline-flex;
|
| 234 |
+
margin-bottom: 10px;
|
| 235 |
+
padding: 5px 10px;
|
| 236 |
+
border-radius: 999px;
|
| 237 |
+
background: rgba(169, 215, 184, 0.12);
|
| 238 |
+
color: var(--success);
|
| 239 |
+
font-size: 11px;
|
| 240 |
+
font-weight: 700;
|
| 241 |
+
letter-spacing: 0.08em;
|
| 242 |
+
text-transform: uppercase;
|
| 243 |
+
}
|
| 244 |
+
|
| 245 |
+
.model-card h3 {
|
| 246 |
+
margin: 0 0 10px;
|
| 247 |
+
font-size: 1.1rem;
|
| 248 |
+
}
|
| 249 |
+
|
| 250 |
+
.meta-line {
|
| 251 |
+
margin: 0;
|
| 252 |
+
color: var(--muted);
|
| 253 |
+
font-size: 14px;
|
| 254 |
+
line-height: 1.7;
|
| 255 |
+
}
|
| 256 |
+
|
| 257 |
+
.split {
|
| 258 |
+
display: grid;
|
| 259 |
+
grid-template-columns: minmax(0, 1.15fr) minmax(0, 0.85fr);
|
| 260 |
+
gap: 18px;
|
| 261 |
+
}
|
| 262 |
+
|
| 263 |
+
.code-block {
|
| 264 |
+
margin: 0;
|
| 265 |
+
padding: 18px;
|
| 266 |
+
border-radius: 20px;
|
| 267 |
+
border: 1px solid var(--line);
|
| 268 |
+
background: #0f0c0a;
|
| 269 |
+
color: #f4e7d6;
|
| 270 |
+
overflow: auto;
|
| 271 |
+
line-height: 1.7;
|
| 272 |
+
}
|
| 273 |
+
|
| 274 |
+
.route-list {
|
| 275 |
+
list-style: none;
|
| 276 |
+
margin: 0;
|
| 277 |
+
padding: 0;
|
| 278 |
+
display: grid;
|
| 279 |
+
gap: 10px;
|
| 280 |
+
}
|
| 281 |
+
|
| 282 |
+
.route-list li {
|
| 283 |
+
padding: 14px 16px;
|
| 284 |
+
border-radius: 16px;
|
| 285 |
+
border: 1px solid var(--line);
|
| 286 |
+
background: rgba(250, 245, 238, 0.03);
|
| 287 |
+
}
|
| 288 |
+
|
| 289 |
+
.route-list code {
|
| 290 |
+
color: var(--accent-strong);
|
| 291 |
+
}
|
| 292 |
+
|
| 293 |
+
.note {
|
| 294 |
+
margin-top: 14px;
|
| 295 |
+
color: var(--muted);
|
| 296 |
+
line-height: 1.7;
|
| 297 |
+
}
|
| 298 |
+
|
| 299 |
+
.empty {
|
| 300 |
+
padding: 18px;
|
| 301 |
+
border-radius: 18px;
|
| 302 |
+
border: 1px dashed var(--line);
|
| 303 |
+
color: var(--muted);
|
| 304 |
+
}
|
| 305 |
+
|
| 306 |
+
@media (max-width: 860px) {
|
| 307 |
+
.shell {
|
| 308 |
+
width: min(100vw - 20px, 1120px);
|
| 309 |
+
padding-top: 18px;
|
| 310 |
+
}
|
| 311 |
+
|
| 312 |
+
.hero,
|
| 313 |
+
.section {
|
| 314 |
+
padding: 22px;
|
| 315 |
+
}
|
| 316 |
+
|
| 317 |
+
.split {
|
| 318 |
+
grid-template-columns: 1fr;
|
| 319 |
+
}
|
| 320 |
+
}
|
| 321 |
+
</style>
|
| 322 |
+
</head>
|
| 323 |
+
<body>
|
| 324 |
+
<main class="shell">
|
| 325 |
+
<section class="panel hero">
|
| 326 |
+
<div class="eyebrow">Hosted bridge</div>
|
| 327 |
+
<h1>Claude Web accounts, exposed as clean API routes.</h1>
|
| 328 |
+
<p class="lede">
|
| 329 |
+
Web2API turns browser-authenticated Claude sessions into OpenAI-compatible and
|
| 330 |
+
Anthropic-compatible endpoints, with a compact admin dashboard for proxy groups,
|
| 331 |
+
account auth, runtime status, and persistent global auth settings.
|
| 332 |
+
</p>
|
| 333 |
+
<div class="hero-actions">
|
| 334 |
+
<a class="button primary" href="/login">Open dashboard</a>
|
| 335 |
+
<a class="button" href="#supported-models">Supported models</a>
|
| 336 |
+
<a class="button" href="/healthz">View health JSON</a>
|
| 337 |
+
</div>
|
| 338 |
+
<div class="hero-stats">
|
| 339 |
+
<div class="stat">
|
| 340 |
+
<span class="stat-label">Provider</span>
|
| 341 |
+
<strong>claude</strong>
|
| 342 |
+
</div>
|
| 343 |
+
<div class="stat">
|
| 344 |
+
<span class="stat-label">Default model</span>
|
| 345 |
+
<strong id="defaultModel">Loading…</strong>
|
| 346 |
+
</div>
|
| 347 |
+
<div class="stat">
|
| 348 |
+
<span class="stat-label">Config dashboard</span>
|
| 349 |
+
<strong id="dashboardStatus">Checking…</strong>
|
| 350 |
+
</div>
|
| 351 |
+
</div>
|
| 352 |
+
</section>
|
| 353 |
+
|
| 354 |
+
<div class="stack">
|
| 355 |
+
<section class="panel section" id="supported-models">
|
| 356 |
+
<div class="section-head">
|
| 357 |
+
<div>
|
| 358 |
+
<h2>Supported models</h2>
|
| 359 |
+
</div>
|
| 360 |
+
<p>
|
| 361 |
+
Public model IDs are accepted on both OpenAI-compatible and Anthropic-compatible
|
| 362 |
+
routes. The cards below show the exact public → upstream mapping used by the server.
|
| 363 |
+
</p>
|
| 364 |
+
</div>
|
| 365 |
+
<div id="modelsList" class="model-grid">
|
| 366 |
+
<div class="empty">Loading supported models…</div>
|
| 367 |
+
</div>
|
| 368 |
+
</section>
|
| 369 |
+
|
| 370 |
+
<section class="panel section">
|
| 371 |
+
<div class="section-head">
|
| 372 |
+
<div>
|
| 373 |
+
<h2>Quick start</h2>
|
| 374 |
+
</div>
|
| 375 |
+
<p>
|
| 376 |
+
Point your client to the OpenAI-compatible route, then use one of the public model IDs
|
| 377 |
+
from the list above.
|
| 378 |
+
</p>
|
| 379 |
+
</div>
|
| 380 |
+
<div class="split">
|
| 381 |
+
<pre class="code-block"><code>curl https://YOUR_HOST/openai/claude/v1/chat/completions \
|
| 382 |
+
-H "Authorization: Bearer $WEB2API_AUTH_API_KEY" \
|
| 383 |
+
-H "Content-Type: application/json" \
|
| 384 |
+
-d '{
|
| 385 |
+
"model": "claude-sonnet-4.6",
|
| 386 |
+
"messages": [
|
| 387 |
+
{"role": "user", "content": "Hello from Web2API"}
|
| 388 |
+
]
|
| 389 |
+
}'</code></pre>
|
| 390 |
+
<div>
|
| 391 |
+
<ul class="route-list">
|
| 392 |
+
<li><code>POST /openai/claude/v1/chat/completions</code></li>
|
| 393 |
+
<li><code>POST /claude/v1/chat/completions</code></li>
|
| 394 |
+
<li><code>POST /anthropic/claude/v1/messages</code></li>
|
| 395 |
+
<li><code>GET /api/models/claude/metadata</code></li>
|
| 396 |
+
<li><code>GET /healthz</code></li>
|
| 397 |
+
</ul>
|
| 398 |
+
<p class="note">
|
| 399 |
+
Use <code>/config</code> after signing in to edit proxy groups, account JSON auth, API
|
| 400 |
+
keys, and the admin password.
|
| 401 |
+
</p>
|
| 402 |
+
</div>
|
| 403 |
+
</div>
|
| 404 |
+
</section>
|
| 405 |
+
</div>
|
| 406 |
+
</main>
|
| 407 |
+
|
| 408 |
+
<script>
|
| 409 |
+
const defaultModelEl = document.getElementById('defaultModel')
|
| 410 |
+
const dashboardStatusEl = document.getElementById('dashboardStatus')
|
| 411 |
+
const modelsListEl = document.getElementById('modelsList')
|
| 412 |
+
|
| 413 |
+
// Escape text for safe interpolation into an HTML template string.
// Maps the HTML-significant characters &, <, > and double quote to their named
// entities; null/undefined become ''. NOTE: the previous version's replacement
// strings had been entity-decoded into the raw characters themselves, making
// every .replace() an identity operation — i.e. the function escaped nothing.
function escapeHtml(value) {
  return String(value == null ? '' : value)
    .replace(/&/g, '&amp;')   // must run first so later entities aren't double-escaped
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
}
|
| 420 |
+
|
| 421 |
+
// Render one card per public -> upstream model mapping entry, and reflect the
// default model in the hero stat. An empty mapping shows a placeholder card.
function renderModels(metadata) {
  const mapping = (metadata && metadata.model_mapping) || {}
  const entries = Object.entries(mapping)
  defaultModelEl.textContent = (metadata && metadata.default_model) || 'Unavailable'
  if (entries.length === 0) {
    modelsListEl.innerHTML = '<div class="empty">No model metadata available.</div>'
    return
  }
  const cards = entries.map(([publicModel, upstreamModel]) => {
    const kicker =
      publicModel === metadata.default_model
        ? '<span class="kicker">Default</span>'
        : '<span class="kicker">Available</span>'
    return `
        <article class="model-card">
          ${kicker}
          <h3><code>${escapeHtml(publicModel)}</code></h3>
          <p class="meta-line">Accepted public ID for client requests.</p>
          <p class="meta-line"><strong>Upstream:</strong> <code>${escapeHtml(upstreamModel)}</code></p>
        </article>
      `
  })
  modelsListEl.innerHTML = cards.join('')
}
|
| 446 |
+
|
| 447 |
+
// Fetch model metadata and render it; any failure degrades to the
// "Unavailable" stat plus an empty-state card.
async function loadMetadata() {
  try {
    const response = await fetch('/api/models/claude/metadata')
    if (!response.ok) throw new Error(await response.text())
    const metadata = await response.json()
    renderModels(metadata)
  } catch (error) {
    defaultModelEl.textContent = 'Unavailable'
    modelsListEl.innerHTML = '<div class="empty">Failed to load model metadata.</div>'
  }
}
|
| 457 |
+
|
| 458 |
+
// Probe /healthz and report whether the config dashboard login is enabled.
async function loadHealth() {
  try {
    const response = await fetch('/healthz')
    if (!response.ok) throw new Error(await response.text())
    const health = await response.json()
    dashboardStatusEl.textContent = health.config_login_enabled ? 'Enabled at /login' : 'Disabled'
  } catch (_) {
    dashboardStatusEl.textContent = 'Unavailable'
  }
}
|
| 470 |
+
|
| 471 |
+
void Promise.all([loadMetadata(), loadHealth()])
|
| 472 |
+
</script>
|
| 473 |
+
</body>
|
| 474 |
+
</html>
|
core/static/login.html
ADDED
|
@@ -0,0 +1,255 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<!DOCTYPE html>
|
| 2 |
+
<html lang="en">
|
| 3 |
+
<head>
|
| 4 |
+
<meta charset="UTF-8" />
|
| 5 |
+
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
| 6 |
+
<title>Admin sign in</title>
|
| 7 |
+
<style>
|
| 8 |
+
:root {
|
| 9 |
+
--bg: #120f0d;
|
| 10 |
+
--bg-deep: #080706;
|
| 11 |
+
--panel: rgba(29, 24, 21, 0.9);
|
| 12 |
+
--line: rgba(247, 239, 230, 0.12);
|
| 13 |
+
--text: #f7efe6;
|
| 14 |
+
--muted: #b6aa98;
|
| 15 |
+
--accent: #efd9bc;
|
| 16 |
+
--accent-strong: #ddbb8e;
|
| 17 |
+
--danger-bg: rgba(127, 29, 29, 0.24);
|
| 18 |
+
--danger-line: rgba(248, 113, 113, 0.28);
|
| 19 |
+
--danger-text: #fecaca;
|
| 20 |
+
--shadow: 0 34px 90px rgba(0, 0, 0, 0.45);
|
| 21 |
+
}
|
| 22 |
+
|
| 23 |
+
* {
|
| 24 |
+
box-sizing: border-box;
|
| 25 |
+
}
|
| 26 |
+
|
| 27 |
+
body {
|
| 28 |
+
margin: 0;
|
| 29 |
+
min-height: 100vh;
|
| 30 |
+
display: grid;
|
| 31 |
+
place-items: center;
|
| 32 |
+
padding: 20px;
|
| 33 |
+
color: var(--text);
|
| 34 |
+
font-family: "Avenir Next", "Segoe UI", ui-sans-serif, system-ui, sans-serif;
|
| 35 |
+
background:
|
| 36 |
+
radial-gradient(circle at top, rgba(239, 217, 188, 0.16), transparent 28%),
|
| 37 |
+
linear-gradient(180deg, #171210 0%, var(--bg) 44%, var(--bg-deep) 100%);
|
| 38 |
+
}
|
| 39 |
+
|
| 40 |
+
body::before {
|
| 41 |
+
content: "";
|
| 42 |
+
position: fixed;
|
| 43 |
+
inset: 0;
|
| 44 |
+
pointer-events: none;
|
| 45 |
+
opacity: 0.14;
|
| 46 |
+
background-image: linear-gradient(rgba(255, 255, 255, 0.03) 1px, transparent 1px);
|
| 47 |
+
background-size: 100% 3px;
|
| 48 |
+
}
|
| 49 |
+
|
| 50 |
+
.card {
|
| 51 |
+
width: min(460px, 100%);
|
| 52 |
+
padding: 30px;
|
| 53 |
+
border-radius: 30px;
|
| 54 |
+
border: 1px solid var(--line);
|
| 55 |
+
background: linear-gradient(180deg, rgba(39, 32, 28, 0.96), rgba(24, 20, 17, 0.94));
|
| 56 |
+
box-shadow: var(--shadow);
|
| 57 |
+
backdrop-filter: blur(18px);
|
| 58 |
+
}
|
| 59 |
+
|
| 60 |
+
.eyebrow {
|
| 61 |
+
display: inline-flex;
|
| 62 |
+
align-items: center;
|
| 63 |
+
gap: 10px;
|
| 64 |
+
padding: 8px 14px;
|
| 65 |
+
border-radius: 999px;
|
| 66 |
+
border: 1px solid rgba(239, 217, 188, 0.18);
|
| 67 |
+
background: rgba(239, 217, 188, 0.08);
|
| 68 |
+
color: var(--accent-strong);
|
| 69 |
+
font-size: 11px;
|
| 70 |
+
letter-spacing: 0.18em;
|
| 71 |
+
font-weight: 700;
|
| 72 |
+
text-transform: uppercase;
|
| 73 |
+
}
|
| 74 |
+
|
| 75 |
+
h1 {
|
| 76 |
+
margin: 18px 0 10px;
|
| 77 |
+
font-family: "Iowan Old Style", "Palatino Linotype", "Book Antiqua", Georgia, serif;
|
| 78 |
+
font-size: clamp(2.2rem, 7vw, 3.2rem);
|
| 79 |
+
line-height: 0.96;
|
| 80 |
+
letter-spacing: -0.04em;
|
| 81 |
+
}
|
| 82 |
+
|
| 83 |
+
p {
|
| 84 |
+
margin: 0 0 22px;
|
| 85 |
+
color: var(--muted);
|
| 86 |
+
line-height: 1.75;
|
| 87 |
+
}
|
| 88 |
+
|
| 89 |
+
label {
|
| 90 |
+
display: block;
|
| 91 |
+
margin-bottom: 8px;
|
| 92 |
+
color: var(--muted);
|
| 93 |
+
font-size: 13px;
|
| 94 |
+
letter-spacing: 0.06em;
|
| 95 |
+
text-transform: uppercase;
|
| 96 |
+
}
|
| 97 |
+
|
| 98 |
+
input {
|
| 99 |
+
width: 100%;
|
| 100 |
+
padding: 14px 16px;
|
| 101 |
+
border-radius: 18px;
|
| 102 |
+
border: 1px solid var(--line);
|
| 103 |
+
background: rgba(8, 7, 6, 0.46);
|
| 104 |
+
color: var(--text);
|
| 105 |
+
font-size: 15px;
|
| 106 |
+
}
|
| 107 |
+
|
| 108 |
+
input:focus {
|
| 109 |
+
outline: none;
|
| 110 |
+
border-color: rgba(239, 217, 188, 0.34);
|
| 111 |
+
box-shadow: 0 0 0 4px rgba(239, 217, 188, 0.08);
|
| 112 |
+
}
|
| 113 |
+
|
| 114 |
+
.actions {
|
| 115 |
+
display: flex;
|
| 116 |
+
flex-wrap: wrap;
|
| 117 |
+
gap: 12px;
|
| 118 |
+
margin-top: 18px;
|
| 119 |
+
}
|
| 120 |
+
|
| 121 |
+
button,
|
| 122 |
+
.link {
|
| 123 |
+
display: inline-flex;
|
| 124 |
+
align-items: center;
|
| 125 |
+
justify-content: center;
|
| 126 |
+
min-height: 48px;
|
| 127 |
+
padding: 0 18px;
|
| 128 |
+
border-radius: 999px;
|
| 129 |
+
font-size: 14px;
|
| 130 |
+
font-weight: 600;
|
| 131 |
+
text-decoration: none;
|
| 132 |
+
}
|
| 133 |
+
|
| 134 |
+
button {
|
| 135 |
+
flex: 1 1 180px;
|
| 136 |
+
border: none;
|
| 137 |
+
cursor: pointer;
|
| 138 |
+
color: #1a140f;
|
| 139 |
+
background: linear-gradient(135deg, var(--accent), var(--accent-strong));
|
| 140 |
+
}
|
| 141 |
+
|
| 142 |
+
button:disabled {
|
| 143 |
+
cursor: wait;
|
| 144 |
+
opacity: 0.78;
|
| 145 |
+
}
|
| 146 |
+
|
| 147 |
+
.link {
|
| 148 |
+
flex: 1 1 140px;
|
| 149 |
+
border: 1px solid var(--line);
|
| 150 |
+
color: var(--text);
|
| 151 |
+
background: rgba(255, 255, 255, 0.03);
|
| 152 |
+
}
|
| 153 |
+
|
| 154 |
+
.note {
|
| 155 |
+
margin-top: 16px;
|
| 156 |
+
font-size: 13px;
|
| 157 |
+
}
|
| 158 |
+
|
| 159 |
+
.error {
|
| 160 |
+
display: none;
|
| 161 |
+
margin-top: 16px;
|
| 162 |
+
padding: 12px 14px;
|
| 163 |
+
border-radius: 18px;
|
| 164 |
+
border: 1px solid var(--danger-line);
|
| 165 |
+
background: var(--danger-bg);
|
| 166 |
+
color: var(--danger-text);
|
| 167 |
+
font-size: 13px;
|
| 168 |
+
line-height: 1.6;
|
| 169 |
+
}
|
| 170 |
+
</style>
|
| 171 |
+
</head>
|
| 172 |
+
<body>
|
| 173 |
+
<form class="card" id="loginForm">
|
| 174 |
+
<div class="eyebrow">Admin access</div>
|
| 175 |
+
<h1>Sign in to the config dashboard.</h1>
|
| 176 |
+
<p>
|
| 177 |
+
Use the current admin password for Web2API. If the password is managed by environment
|
| 178 |
+
variables, this page still accepts that live value.
|
| 179 |
+
</p>
|
| 180 |
+
|
| 181 |
+
<label for="secret">Admin password</label>
|
| 182 |
+
<input
|
| 183 |
+
id="secret"
|
| 184 |
+
name="secret"
|
| 185 |
+
type="password"
|
| 186 |
+
autocomplete="current-password"
|
| 187 |
+
placeholder="Enter admin password"
|
| 188 |
+
/>
|
| 189 |
+
|
| 190 |
+
<div class="actions">
|
| 191 |
+
<button type="submit" id="submitBtn">Sign in</button>
|
| 192 |
+
<a class="link" href="/">Back home</a>
|
| 193 |
+
</div>
|
| 194 |
+
|
| 195 |
+
<p class="note">The dashboard session is stored in an HTTP-only cookie after sign-in.</p>
|
| 196 |
+
<div class="error" id="error"></div>
|
| 197 |
+
</form>
|
| 198 |
+
|
| 199 |
+
<script>
|
| 200 |
+
const form = document.getElementById('loginForm')
|
| 201 |
+
const secretInput = document.getElementById('secret')
|
| 202 |
+
const errorEl = document.getElementById('error')
|
| 203 |
+
const submitBtn = document.getElementById('submitBtn')
|
| 204 |
+
|
| 205 |
+
// Display the inline error banner with the given message.
function showError(message) {
  errorEl.style.display = 'block'
  errorEl.textContent = message
}
|
| 209 |
+
|
| 210 |
+
// Clear and hide the inline error banner.
function hideError() {
  errorEl.textContent = ''
  errorEl.style.display = 'none'
}
|
| 214 |
+
|
| 215 |
+
// Sign-in form: POST the admin password as JSON, surface any server-provided
// detail message on failure, and redirect to /config on success.
form.addEventListener('submit', async (event) => {
  event.preventDefault()
  hideError()

  const secret = secretInput.value.trim()
  if (!secret) {
    showError('Enter the admin password.')
    secretInput.focus()
    return
  }

  submitBtn.disabled = true
  submitBtn.textContent = 'Signing in…'
  try {
    const response = await fetch('/api/admin/login', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ secret }),
    })

    // The error body may or may not be JSON; keep the raw text as a fallback.
    const rawBody = await response.text()
    let parsed = null
    try {
      parsed = rawBody ? JSON.parse(rawBody) : null
    } catch (_) {}

    if (!response.ok) {
      throw new Error((parsed && parsed.detail) || rawBody || 'Sign-in failed.')
    }

    window.location.href = '/config'
  } catch (error) {
    showError(error && error.message ? error.message : 'Sign-in failed.')
  } finally {
    submitBtn.disabled = false
    submitBtn.textContent = 'Sign in'
  }
})
|
| 253 |
+
</script>
|
| 254 |
+
</body>
|
| 255 |
+
</html>
|