Upload 45 files
Browse files- .gitattributes +2 -0
- Dockerfile +55 -0
- LICENSE +21 -0
- app/.DS_Store +0 -0
- app/api/admin/manage.py +1017 -0
- app/api/v1/chat.py +107 -0
- app/api/v1/images.py +53 -0
- app/api/v1/models.py +114 -0
- app/core/auth.py +66 -0
- app/core/config.py +203 -0
- app/core/exception.py +119 -0
- app/core/logger.py +130 -0
- app/core/proxy_pool.py +170 -0
- app/core/storage.py +445 -0
- app/models/grok_models.py +163 -0
- app/models/openai_schema.py +103 -0
- app/services/api_keys.py +195 -0
- app/services/grok/cache.py +243 -0
- app/services/grok/client.py +358 -0
- app/services/grok/create.py +140 -0
- app/services/grok/processer.py +430 -0
- app/services/grok/statsig.py +80 -0
- app/services/grok/token.py +619 -0
- app/services/grok/upload.py +209 -0
- app/services/mcp/__init__.py +6 -0
- app/services/mcp/server.py +63 -0
- app/services/mcp/tools.py +77 -0
- app/services/request_logger.py +143 -0
- app/services/request_stats.py +205 -0
- app/template/admin.html +0 -0
- app/template/favicon.png +3 -0
- app/template/login.html +76 -0
- data/setting.toml +25 -0
- data/temp/image.temp +0 -0
- data/temp/video/users-8522ce45-679b-4e0e-a0f7-bb18f434eb6b-generated-15f7113f-5d16-4ff1-bdaa-a2eabd66671c-generated_video.mp4 +3 -0
- data/token.json +4 -0
- docker-compose.yml +25 -0
- docker-entrypoint.sh +56 -0
- main.py +187 -0
- pyproject.toml +25 -0
- readme.md +254 -0
- requirements.txt +17 -0
- test/test_concurrency.py +276 -0
- test/test_concurrency.sh +177 -0
- test_key.py +50 -0
- uv.lock +0 -0
.gitattributes
CHANGED
|
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
app/template/favicon.png filter=lfs diff=lfs merge=lfs -text
|
| 37 |
+
data/temp/video/users-8522ce45-679b-4e0e-a0f7-bb18f434eb6b-generated-15f7113f-5d16-4ff1-bdaa-a2eabd66671c-generated_video.mp4 filter=lfs diff=lfs merge=lfs -text
|
Dockerfile
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ๆๅปบ้ถๆฎต
|
| 2 |
+
FROM python:3.11-slim AS builder
|
| 3 |
+
|
| 4 |
+
WORKDIR /build
|
| 5 |
+
|
| 6 |
+
# ๅฎ่ฃ
ไพ่ตๅฐ็ฌ็ซ็ฎๅฝ
|
| 7 |
+
COPY requirements.txt .
|
| 8 |
+
RUN pip install --no-cache-dir --only-binary=:all: --prefix=/install -r requirements.txt && \
|
| 9 |
+
find /install -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true && \
|
| 10 |
+
find /install -type d -name "tests" -exec rm -rf {} + 2>/dev/null || true && \
|
| 11 |
+
find /install -type d -name "test" -exec rm -rf {} + 2>/dev/null || true && \
|
| 12 |
+
find /install -type d -name "*.dist-info" -exec sh -c 'rm -f "$1"/RECORD "$1"/INSTALLER' _ {} \; && \
|
| 13 |
+
find /install -type f -name "*.pyc" -delete && \
|
| 14 |
+
find /install -type f -name "*.pyo" -delete && \
|
| 15 |
+
find /install -name "*.so" -exec strip --strip-unneeded {} \; 2>/dev/null || true
|
| 16 |
+
|
| 17 |
+
# ่ฟ่ก้ถๆฎต - ไฝฟ็จๆๅฐ้ๅ
|
| 18 |
+
FROM python:3.11-slim
|
| 19 |
+
|
| 20 |
+
WORKDIR /app
|
| 21 |
+
|
| 22 |
+
# ๆธ
็ๅบ็ก้ๅไธญ็ๅไฝๆไปถ
|
| 23 |
+
RUN rm -rf /usr/share/doc/* \
|
| 24 |
+
/usr/share/man/* \
|
| 25 |
+
/usr/share/locale/* \
|
| 26 |
+
/var/cache/apt/* \
|
| 27 |
+
/var/lib/apt/lists/* \
|
| 28 |
+
/tmp/* \
|
| 29 |
+
/var/tmp/*
|
| 30 |
+
|
| 31 |
+
# ไปๆๅปบ้ถๆฎตๅคๅถๅทฒๅฎ่ฃ
็ๅ
|
| 32 |
+
COPY --from=builder /install /usr/local
|
| 33 |
+
|
| 34 |
+
# ๅๅปบๅฟ
่ฆ็็ฎๅฝ๏ผๅ
ๆฌ็จไบๆ่ฝฝ็data็ฎๅฝ๏ผ
|
| 35 |
+
RUN mkdir -p /app/logs /app/data/temp/image /app/data/temp/video
|
| 36 |
+
|
| 37 |
+
# ๅคๅถๅบ็จไปฃ็
|
| 38 |
+
COPY app/ ./app/
|
| 39 |
+
COPY main.py .
|
| 40 |
+
|
| 41 |
+
# ๅคๅถๅนถ่ฎพ็ฝฎ entrypoint ่ๆฌ
|
| 42 |
+
COPY docker-entrypoint.sh /usr/local/bin/
|
| 43 |
+
RUN chmod +x /usr/local/bin/docker-entrypoint.sh
|
| 44 |
+
|
| 45 |
+
# ๅ ้ค Python ๅญ่็ ๅ็ผๅญ
|
| 46 |
+
ENV PYTHONDONTWRITEBYTECODE=1 \
|
| 47 |
+
PYTHONUNBUFFERED=1
|
| 48 |
+
|
| 49 |
+
EXPOSE 8000
|
| 50 |
+
|
| 51 |
+
# ไฝฟ็จ entrypoint ่ๆฌๅๅงๅ้
็ฝฎ
|
| 52 |
+
ENTRYPOINT ["docker-entrypoint.sh"]
|
| 53 |
+
|
| 54 |
+
# ้ป่ฎคๅฏๅจๅฝไปค
|
| 55 |
+
CMD ["python", "-m", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
|
LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
MIT License
|
| 2 |
+
|
| 3 |
+
Copyright (c) 2025 Chenyme
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 6 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 7 |
+
in the Software without restriction, including without limitation the rights
|
| 8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 9 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 10 |
+
furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in all
|
| 13 |
+
copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 21 |
+
SOFTWARE.
|
app/.DS_Store
ADDED
|
Binary file (6.15 kB). View file
|
|
|
app/api/admin/manage.py
ADDED
|
@@ -0,0 +1,1017 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""็ฎก็ๆฅๅฃ - Token็ฎก็ๅ็ณป็ป้
็ฝฎ"""
|
| 2 |
+
|
| 3 |
+
import secrets
|
| 4 |
+
import time
|
| 5 |
+
from typing import Dict, Any, List, Optional
|
| 6 |
+
from datetime import datetime, timedelta
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
from fastapi import APIRouter, HTTPException, Depends, Header, Query
|
| 9 |
+
from fastapi.responses import HTMLResponse
|
| 10 |
+
from pydantic import BaseModel
|
| 11 |
+
|
| 12 |
+
from app.core.config import setting
|
| 13 |
+
from app.core.logger import logger
|
| 14 |
+
from app.services.grok.token import token_manager
|
| 15 |
+
from app.services.request_stats import request_stats
|
| 16 |
+
from app.models.grok_models import TokenType
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
router = APIRouter(tags=["็ฎก็"])
|
| 20 |
+
|
| 21 |
+
# ๅธธ้
|
| 22 |
+
STATIC_DIR = Path(__file__).parents[2] / "template"
|
| 23 |
+
TEMP_DIR = Path(__file__).parents[3] / "data" / "temp"
|
| 24 |
+
IMAGE_CACHE_DIR = TEMP_DIR / "image"
|
| 25 |
+
VIDEO_CACHE_DIR = TEMP_DIR / "video"
|
| 26 |
+
SESSION_EXPIRE_HOURS = 24
|
| 27 |
+
BYTES_PER_KB = 1024
|
| 28 |
+
BYTES_PER_MB = 1024 * 1024
|
| 29 |
+
|
| 30 |
+
# ไผ่ฏๅญๅจ
|
| 31 |
+
_sessions: Dict[str, datetime] = {}
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
# === ่ฏทๆฑ/ๅๅบๆจกๅ ===
|
| 35 |
+
|
| 36 |
+
class LoginRequest(BaseModel):
|
| 37 |
+
username: str
|
| 38 |
+
password: str
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class LoginResponse(BaseModel):
|
| 42 |
+
success: bool
|
| 43 |
+
token: Optional[str] = None
|
| 44 |
+
message: str
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class AddTokensRequest(BaseModel):
|
| 48 |
+
tokens: List[str]
|
| 49 |
+
token_type: str
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class DeleteTokensRequest(BaseModel):
|
| 53 |
+
tokens: List[str]
|
| 54 |
+
token_type: str
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class TokenInfo(BaseModel):
|
| 58 |
+
token: str
|
| 59 |
+
token_type: str
|
| 60 |
+
created_time: Optional[int] = None
|
| 61 |
+
remaining_queries: int
|
| 62 |
+
heavy_remaining_queries: int
|
| 63 |
+
status: str
|
| 64 |
+
tags: List[str] = []
|
| 65 |
+
note: str = ""
|
| 66 |
+
cooldown_until: Optional[int] = None
|
| 67 |
+
cooldown_remaining: int = 0
|
| 68 |
+
last_failure_time: Optional[int] = None
|
| 69 |
+
last_failure_reason: str = ""
|
| 70 |
+
limit_reason: str = ""
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class TokenListResponse(BaseModel):
|
| 74 |
+
success: bool
|
| 75 |
+
data: List[TokenInfo]
|
| 76 |
+
total: int
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class UpdateSettingsRequest(BaseModel):
|
| 80 |
+
global_config: Optional[Dict[str, Any]] = None
|
| 81 |
+
grok_config: Optional[Dict[str, Any]] = None
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
class UpdateTokenTagsRequest(BaseModel):
|
| 85 |
+
token: str
|
| 86 |
+
token_type: str
|
| 87 |
+
tags: List[str]
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
class UpdateTokenNoteRequest(BaseModel):
|
| 91 |
+
token: str
|
| 92 |
+
token_type: str
|
| 93 |
+
note: str
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
class TestTokenRequest(BaseModel):
|
| 97 |
+
token: str
|
| 98 |
+
token_type: str
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
# === ่พ
ๅฉๅฝๆฐ ===
|
| 102 |
+
|
| 103 |
+
def validate_token_type(token_type_str: str) -> TokenType:
|
| 104 |
+
"""้ช่ฏToken็ฑปๅ"""
|
| 105 |
+
if token_type_str not in ["sso", "ssoSuper"]:
|
| 106 |
+
raise HTTPException(
|
| 107 |
+
status_code=400,
|
| 108 |
+
detail={"error": "ๆ ๆ็Token็ฑปๅ", "code": "INVALID_TYPE"}
|
| 109 |
+
)
|
| 110 |
+
return TokenType.NORMAL if token_type_str == "sso" else TokenType.SUPER
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def parse_created_time(created_time) -> Optional[int]:
|
| 114 |
+
"""่งฃๆๅๅปบๆถ้ด"""
|
| 115 |
+
if isinstance(created_time, str):
|
| 116 |
+
return int(created_time) if created_time else None
|
| 117 |
+
elif isinstance(created_time, int):
|
| 118 |
+
return created_time
|
| 119 |
+
return None
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def _get_cooldown_remaining_ms(token_data: Dict[str, Any], now_ms: Optional[int] = None) -> int:
|
| 123 |
+
"""่ทๅๅทๅดๅฉไฝๆถ้ด๏ผๆฏซ็ง๏ผ."""
|
| 124 |
+
cooldown_until = token_data.get("cooldownUntil")
|
| 125 |
+
if not cooldown_until:
|
| 126 |
+
return 0
|
| 127 |
+
|
| 128 |
+
try:
|
| 129 |
+
now = now_ms if now_ms is not None else int(time.time() * 1000)
|
| 130 |
+
remaining = int(cooldown_until) - now
|
| 131 |
+
return remaining if remaining > 0 else 0
|
| 132 |
+
except (TypeError, ValueError):
|
| 133 |
+
return 0
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def _is_token_in_cooldown(token_data: Dict[str, Any], now_ms: Optional[int] = None) -> bool:
|
| 137 |
+
"""ๅคๆญTokenๆฏๅฆๅคไบ429ๅทๅดไธญ."""
|
| 138 |
+
return _get_cooldown_remaining_ms(token_data, now_ms) > 0
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def calculate_token_stats(tokens: Dict[str, Any], token_type: str) -> Dict[str, int]:
|
| 142 |
+
"""่ฎก็ฎToken็ป่ฎก."""
|
| 143 |
+
total = len(tokens)
|
| 144 |
+
expired = sum(1 for t in tokens.values() if t.get("status") == "expired")
|
| 145 |
+
now_ms = int(time.time() * 1000)
|
| 146 |
+
cooldown = 0
|
| 147 |
+
exhausted = 0
|
| 148 |
+
unused = 0
|
| 149 |
+
active = 0
|
| 150 |
+
|
| 151 |
+
for token_data in tokens.values():
|
| 152 |
+
if token_data.get("status") == "expired":
|
| 153 |
+
continue
|
| 154 |
+
|
| 155 |
+
if _is_token_in_cooldown(token_data, now_ms):
|
| 156 |
+
cooldown += 1
|
| 157 |
+
continue
|
| 158 |
+
|
| 159 |
+
remaining = token_data.get("remainingQueries", -1)
|
| 160 |
+
heavy_remaining = token_data.get("heavyremainingQueries", -1)
|
| 161 |
+
|
| 162 |
+
if token_type == "normal":
|
| 163 |
+
if remaining == -1:
|
| 164 |
+
unused += 1
|
| 165 |
+
elif remaining == 0:
|
| 166 |
+
exhausted += 1
|
| 167 |
+
else:
|
| 168 |
+
active += 1
|
| 169 |
+
else:
|
| 170 |
+
if remaining == -1 and heavy_remaining == -1:
|
| 171 |
+
unused += 1
|
| 172 |
+
elif remaining == 0 or heavy_remaining == 0:
|
| 173 |
+
exhausted += 1
|
| 174 |
+
else:
|
| 175 |
+
active += 1
|
| 176 |
+
|
| 177 |
+
limited = cooldown + exhausted
|
| 178 |
+
return {
|
| 179 |
+
"total": total,
|
| 180 |
+
"unused": unused,
|
| 181 |
+
"limited": limited,
|
| 182 |
+
"cooldown": cooldown,
|
| 183 |
+
"exhausted": exhausted,
|
| 184 |
+
"expired": expired,
|
| 185 |
+
"active": active
|
| 186 |
+
}
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
def verify_admin_session(authorization: Optional[str] = Header(None)) -> bool:
|
| 190 |
+
"""้ช่ฏ็ฎก็ๅไผ่ฏ"""
|
| 191 |
+
if not authorization or not authorization.startswith("Bearer "):
|
| 192 |
+
raise HTTPException(status_code=401, detail={"error": "ๆชๆๆ่ฎฟ้ฎ", "code": "UNAUTHORIZED"})
|
| 193 |
+
|
| 194 |
+
token = authorization[7:]
|
| 195 |
+
|
| 196 |
+
if token not in _sessions:
|
| 197 |
+
raise HTTPException(status_code=401, detail={"error": "ไผ่ฏๆ ๆ", "code": "SESSION_INVALID"})
|
| 198 |
+
|
| 199 |
+
if datetime.now() > _sessions[token]:
|
| 200 |
+
del _sessions[token]
|
| 201 |
+
raise HTTPException(status_code=401, detail={"error": "ไผ่ฏๅทฒ่ฟๆ", "code": "SESSION_EXPIRED"})
|
| 202 |
+
|
| 203 |
+
return True
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
def get_token_status(token_data: Dict[str, Any], token_type: str) -> str:
|
| 207 |
+
"""่ทๅToken็ถๆ."""
|
| 208 |
+
if token_data.get("status") == "expired":
|
| 209 |
+
return "ๅคฑๆ"
|
| 210 |
+
|
| 211 |
+
if _is_token_in_cooldown(token_data):
|
| 212 |
+
return "ๅทๅดไธญ"
|
| 213 |
+
|
| 214 |
+
remaining = token_data.get("remainingQueries", -1)
|
| 215 |
+
heavy_remaining = token_data.get("heavyremainingQueries", -1)
|
| 216 |
+
|
| 217 |
+
if token_type == "ssoSuper":
|
| 218 |
+
if remaining == -1 and heavy_remaining == -1:
|
| 219 |
+
return "ๆชไฝฟ็จ"
|
| 220 |
+
if remaining == 0 or heavy_remaining == 0:
|
| 221 |
+
return "้ขๅบฆ่ๅฐฝ"
|
| 222 |
+
return "ๆญฃๅธธ"
|
| 223 |
+
|
| 224 |
+
if remaining == -1:
|
| 225 |
+
return "ๆชไฝฟ็จ"
|
| 226 |
+
if remaining == 0:
|
| 227 |
+
return "้ขๅบฆ่ๅฐฝ"
|
| 228 |
+
return "ๆญฃๅธธ"
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
def _calculate_dir_size(directory: Path) -> int:
|
| 232 |
+
"""่ฎก็ฎ็ฎๅฝๅคงๅฐ"""
|
| 233 |
+
total = 0
|
| 234 |
+
for file_path in directory.iterdir():
|
| 235 |
+
if file_path.is_file():
|
| 236 |
+
try:
|
| 237 |
+
total += file_path.stat().st_size
|
| 238 |
+
except Exception as e:
|
| 239 |
+
logger.warning(f"[Admin] ๆ ๆณ่ทๅๆไปถๅคงๅฐ: {file_path.name}, {e}")
|
| 240 |
+
return total
|
| 241 |
+
|
| 242 |
+
|
| 243 |
+
def _format_size(size_bytes: int) -> str:
|
| 244 |
+
"""ๆ ผๅผๅๆไปถๅคงๅฐ"""
|
| 245 |
+
size_mb = size_bytes / BYTES_PER_MB
|
| 246 |
+
if size_mb < 1:
|
| 247 |
+
return f"{size_bytes / BYTES_PER_KB:.1f} KB"
|
| 248 |
+
return f"{size_mb:.1f} MB"
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
# === ้กต้ข่ทฏ็ฑ ===
|
| 252 |
+
|
| 253 |
+
@router.get("/login", response_class=HTMLResponse)
|
| 254 |
+
async def login_page():
|
| 255 |
+
"""็ปๅฝ้กต้ข"""
|
| 256 |
+
login_html = STATIC_DIR / "login.html"
|
| 257 |
+
if login_html.exists():
|
| 258 |
+
return login_html.read_text(encoding="utf-8")
|
| 259 |
+
raise HTTPException(status_code=404, detail="็ปๅฝ้กต้ขไธๅญๅจ")
|
| 260 |
+
|
| 261 |
+
|
| 262 |
+
@router.get("/manage", response_class=HTMLResponse)
|
| 263 |
+
async def manage_page():
|
| 264 |
+
"""็ฎก็้กต้ข"""
|
| 265 |
+
admin_html = STATIC_DIR / "admin.html"
|
| 266 |
+
if admin_html.exists():
|
| 267 |
+
return admin_html.read_text(encoding="utf-8")
|
| 268 |
+
raise HTTPException(status_code=404, detail="็ฎก็้กต้ขไธๅญๅจ")
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
# === API็ซฏ็น ===
|
| 272 |
+
|
| 273 |
+
@router.post("/api/login", response_model=LoginResponse)
|
| 274 |
+
async def admin_login(request: LoginRequest) -> LoginResponse:
|
| 275 |
+
"""็ฎก็ๅ็ปๅฝ"""
|
| 276 |
+
try:
|
| 277 |
+
logger.debug(f"[Admin] ็ปๅฝๅฐ่ฏ: {request.username}")
|
| 278 |
+
|
| 279 |
+
expected_user = setting.global_config.get("admin_username", "")
|
| 280 |
+
expected_pass = setting.global_config.get("admin_password", "")
|
| 281 |
+
|
| 282 |
+
if request.username != expected_user or request.password != expected_pass:
|
| 283 |
+
logger.warning(f"[Admin] ็ปๅฝๅคฑ่ดฅ: {request.username}")
|
| 284 |
+
return LoginResponse(success=False, message="็จๆทๅๆๅฏ็ ้่ฏฏ")
|
| 285 |
+
|
| 286 |
+
session_token = secrets.token_urlsafe(32)
|
| 287 |
+
_sessions[session_token] = datetime.now() + timedelta(hours=SESSION_EXPIRE_HOURS)
|
| 288 |
+
|
| 289 |
+
logger.debug(f"[Admin] ็ปๅฝๆๅ: {request.username}")
|
| 290 |
+
return LoginResponse(success=True, token=session_token, message="็ปๅฝๆๅ")
|
| 291 |
+
|
| 292 |
+
except Exception as e:
|
| 293 |
+
logger.error(f"[Admin] ็ปๅฝๅผๅธธ: {e}")
|
| 294 |
+
raise HTTPException(status_code=500, detail={"error": f"็ปๅฝๅคฑ่ดฅ: {e}", "code": "LOGIN_ERROR"})
|
| 295 |
+
|
| 296 |
+
|
| 297 |
+
@router.post("/api/logout")
|
| 298 |
+
async def admin_logout(_: bool = Depends(verify_admin_session), authorization: Optional[str] = Header(None)) -> Dict[str, Any]:
|
| 299 |
+
"""็ฎก็ๅ็ปๅบ"""
|
| 300 |
+
try:
|
| 301 |
+
if authorization and authorization.startswith("Bearer "):
|
| 302 |
+
token = authorization[7:]
|
| 303 |
+
if token in _sessions:
|
| 304 |
+
del _sessions[token]
|
| 305 |
+
logger.debug("[Admin] ็ปๅบๆๅ")
|
| 306 |
+
return {"success": True, "message": "็ปๅบๆๅ"}
|
| 307 |
+
|
| 308 |
+
logger.warning("[Admin] ็ปๅบๅคฑ่ดฅ: ๆ ๆไผ่ฏ")
|
| 309 |
+
return {"success": False, "message": "ๆ ๆ็ไผ่ฏ"}
|
| 310 |
+
|
| 311 |
+
except Exception as e:
|
| 312 |
+
logger.error(f"[Admin] ็ปๅบๅผๅธธ: {e}")
|
| 313 |
+
raise HTTPException(status_code=500, detail={"error": f"็ปๅบๅคฑ่ดฅ: {e}", "code": "LOGOUT_ERROR"})
|
| 314 |
+
|
| 315 |
+
|
| 316 |
+
@router.get("/api/tokens", response_model=TokenListResponse)
|
| 317 |
+
async def list_tokens(_: bool = Depends(verify_admin_session)) -> TokenListResponse:
|
| 318 |
+
"""่ทๅTokenๅ่กจ"""
|
| 319 |
+
try:
|
| 320 |
+
logger.debug("[Admin] ่ทๅTokenๅ่กจ")
|
| 321 |
+
|
| 322 |
+
all_tokens = token_manager.get_tokens()
|
| 323 |
+
token_list: List[TokenInfo] = []
|
| 324 |
+
now_ms = int(time.time() * 1000)
|
| 325 |
+
|
| 326 |
+
# ๆฎ้Token
|
| 327 |
+
for token, data in all_tokens.get(TokenType.NORMAL.value, {}).items():
|
| 328 |
+
cooldown_remaining_ms = _get_cooldown_remaining_ms(data, now_ms)
|
| 329 |
+
cooldown_until = data.get("cooldownUntil") if cooldown_remaining_ms else None
|
| 330 |
+
limit_reason = "cooldown" if cooldown_remaining_ms else ""
|
| 331 |
+
if not limit_reason and data.get("remainingQueries", -1) == 0:
|
| 332 |
+
limit_reason = "exhausted"
|
| 333 |
+
token_list.append(TokenInfo(
|
| 334 |
+
token=token,
|
| 335 |
+
token_type="sso",
|
| 336 |
+
created_time=parse_created_time(data.get("createdTime")),
|
| 337 |
+
remaining_queries=data.get("remainingQueries", -1),
|
| 338 |
+
heavy_remaining_queries=data.get("heavyremainingQueries", -1),
|
| 339 |
+
status=get_token_status(data, "sso"),
|
| 340 |
+
tags=data.get("tags", []),
|
| 341 |
+
note=data.get("note", ""),
|
| 342 |
+
cooldown_until=cooldown_until,
|
| 343 |
+
cooldown_remaining=(cooldown_remaining_ms + 999) // 1000 if cooldown_remaining_ms else 0,
|
| 344 |
+
last_failure_time=data.get("lastFailureTime") or None,
|
| 345 |
+
last_failure_reason=data.get("lastFailureReason") or "",
|
| 346 |
+
limit_reason=limit_reason
|
| 347 |
+
))
|
| 348 |
+
|
| 349 |
+
# Super Token
|
| 350 |
+
for token, data in all_tokens.get(TokenType.SUPER.value, {}).items():
|
| 351 |
+
cooldown_remaining_ms = _get_cooldown_remaining_ms(data, now_ms)
|
| 352 |
+
cooldown_until = data.get("cooldownUntil") if cooldown_remaining_ms else None
|
| 353 |
+
limit_reason = "cooldown" if cooldown_remaining_ms else ""
|
| 354 |
+
if not limit_reason and (data.get("remainingQueries", -1) == 0 or data.get("heavyremainingQueries", -1) == 0):
|
| 355 |
+
limit_reason = "exhausted"
|
| 356 |
+
token_list.append(TokenInfo(
|
| 357 |
+
token=token,
|
| 358 |
+
token_type="ssoSuper",
|
| 359 |
+
created_time=parse_created_time(data.get("createdTime")),
|
| 360 |
+
remaining_queries=data.get("remainingQueries", -1),
|
| 361 |
+
heavy_remaining_queries=data.get("heavyremainingQueries", -1),
|
| 362 |
+
status=get_token_status(data, "ssoSuper"),
|
| 363 |
+
tags=data.get("tags", []),
|
| 364 |
+
note=data.get("note", ""),
|
| 365 |
+
cooldown_until=cooldown_until,
|
| 366 |
+
cooldown_remaining=(cooldown_remaining_ms + 999) // 1000 if cooldown_remaining_ms else 0,
|
| 367 |
+
last_failure_time=data.get("lastFailureTime") or None,
|
| 368 |
+
last_failure_reason=data.get("lastFailureReason") or "",
|
| 369 |
+
limit_reason=limit_reason
|
| 370 |
+
))
|
| 371 |
+
|
| 372 |
+
logger.debug(f"[Admin] Tokenๅ่กจ่ทๅๆๅ: {len(token_list)}ไธช")
|
| 373 |
+
return TokenListResponse(success=True, data=token_list, total=len(token_list))
|
| 374 |
+
|
| 375 |
+
except Exception as e:
|
| 376 |
+
logger.error(f"[Admin] ่ทๅTokenๅ่กจๅผๅธธ: {e}")
|
| 377 |
+
raise HTTPException(status_code=500, detail={"error": f"่ทๅๅคฑ่ดฅ: {e}", "code": "LIST_ERROR"})
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
@router.post("/api/tokens/add")
|
| 381 |
+
async def add_tokens(request: AddTokensRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 382 |
+
"""ๆน้ๆทปๅ Token"""
|
| 383 |
+
try:
|
| 384 |
+
logger.debug(f"[Admin] ๆทปๅ Token: {request.token_type}, {len(request.tokens)}ไธช")
|
| 385 |
+
|
| 386 |
+
token_type = validate_token_type(request.token_type)
|
| 387 |
+
await token_manager.add_token(request.tokens, token_type)
|
| 388 |
+
|
| 389 |
+
logger.debug(f"[Admin] Tokenๆทปๅ ๆๅ: {len(request.tokens)}ไธช")
|
| 390 |
+
return {"success": True, "message": f"ๆๅๆทปๅ {len(request.tokens)} ไธชToken", "count": len(request.tokens)}
|
| 391 |
+
|
| 392 |
+
except HTTPException:
|
| 393 |
+
raise
|
| 394 |
+
except Exception as e:
|
| 395 |
+
logger.error(f"[Admin] Tokenๆทปๅ ๅผๅธธ: {e}")
|
| 396 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆทปๅ ๅคฑ่ดฅ: {e}", "code": "ADD_ERROR"})
|
| 397 |
+
|
| 398 |
+
|
| 399 |
+
@router.post("/api/tokens/delete")
|
| 400 |
+
async def delete_tokens(request: DeleteTokensRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 401 |
+
"""ๆน้ๅ ้คToken"""
|
| 402 |
+
try:
|
| 403 |
+
logger.debug(f"[Admin] ๅ ้คToken: {request.token_type}, {len(request.tokens)}ไธช")
|
| 404 |
+
|
| 405 |
+
token_type = validate_token_type(request.token_type)
|
| 406 |
+
await token_manager.delete_token(request.tokens, token_type)
|
| 407 |
+
|
| 408 |
+
logger.debug(f"[Admin] Tokenๅ ้คๆๅ: {len(request.tokens)}ไธช")
|
| 409 |
+
return {"success": True, "message": f"ๆๅๅ ้ค {len(request.tokens)} ไธชToken", "count": len(request.tokens)}
|
| 410 |
+
|
| 411 |
+
except HTTPException:
|
| 412 |
+
raise
|
| 413 |
+
except Exception as e:
|
| 414 |
+
logger.error(f"[Admin] Tokenๅ ้คๅผๅธธ: {e}")
|
| 415 |
+
raise HTTPException(status_code=500, detail={"error": f"ๅ ้คๅคฑ่ดฅ: {e}", "code": "DELETE_ERROR"})
|
| 416 |
+
|
| 417 |
+
|
| 418 |
+
@router.get("/api/settings")
|
| 419 |
+
async def get_settings(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 420 |
+
"""่ทๅ้
็ฝฎ"""
|
| 421 |
+
try:
|
| 422 |
+
logger.debug("[Admin] ่ทๅ้
็ฝฎ")
|
| 423 |
+
return {"success": True, "data": {"global": setting.global_config, "grok": setting.grok_config}}
|
| 424 |
+
except Exception as e:
|
| 425 |
+
logger.error(f"[Admin] ่ทๅ้
็ฝฎๅคฑ่ดฅ: {e}")
|
| 426 |
+
raise HTTPException(status_code=500, detail={"error": f"่ทๅๅคฑ่ดฅ: {e}", "code": "GET_SETTINGS_ERROR"})
|
| 427 |
+
|
| 428 |
+
|
| 429 |
+
@router.post("/api/settings")
|
| 430 |
+
async def update_settings(request: UpdateSettingsRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 431 |
+
"""ๆดๆฐ้
็ฝฎ"""
|
| 432 |
+
try:
|
| 433 |
+
logger.debug("[Admin] ๆดๆฐ้
็ฝฎ")
|
| 434 |
+
await setting.save(global_config=request.global_config, grok_config=request.grok_config)
|
| 435 |
+
logger.debug("[Admin] ้
็ฝฎๆดๆฐๆๅ")
|
| 436 |
+
return {"success": True, "message": "้
็ฝฎๆดๆฐๆๅ"}
|
| 437 |
+
except Exception as e:
|
| 438 |
+
logger.error(f"[Admin] ๆดๆฐ้
็ฝฎๅคฑ่ดฅ: {e}")
|
| 439 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆดๆฐๅคฑ่ดฅ: {e}", "code": "UPDATE_SETTINGS_ERROR"})
|
| 440 |
+
|
| 441 |
+
|
| 442 |
+
@router.get("/api/cache/size")
|
| 443 |
+
async def get_cache_size(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 444 |
+
"""่ทๅ็ผๅญๅคงๅฐ"""
|
| 445 |
+
try:
|
| 446 |
+
logger.debug("[Admin] ่ทๅ็ผๅญๅคงๅฐ")
|
| 447 |
+
|
| 448 |
+
image_size = _calculate_dir_size(IMAGE_CACHE_DIR) if IMAGE_CACHE_DIR.exists() else 0
|
| 449 |
+
video_size = _calculate_dir_size(VIDEO_CACHE_DIR) if VIDEO_CACHE_DIR.exists() else 0
|
| 450 |
+
total_size = image_size + video_size
|
| 451 |
+
|
| 452 |
+
logger.debug(f"[Admin] ็ผๅญๅคงๅฐ: ๅพ็{_format_size(image_size)}, ่ง้ข{_format_size(video_size)}")
|
| 453 |
+
|
| 454 |
+
return {
|
| 455 |
+
"success": True,
|
| 456 |
+
"data": {
|
| 457 |
+
"image_size": _format_size(image_size),
|
| 458 |
+
"video_size": _format_size(video_size),
|
| 459 |
+
"total_size": _format_size(total_size),
|
| 460 |
+
"image_size_bytes": image_size,
|
| 461 |
+
"video_size_bytes": video_size,
|
| 462 |
+
"total_size_bytes": total_size
|
| 463 |
+
}
|
| 464 |
+
}
|
| 465 |
+
|
| 466 |
+
except Exception as e:
|
| 467 |
+
logger.error(f"[Admin] ่ทๅ็ผๅญๅคงๅฐๅผๅธธ: {e}")
|
| 468 |
+
raise HTTPException(status_code=500, detail={"error": f"่ทๅๅคฑ่ดฅ: {e}", "code": "CACHE_SIZE_ERROR"})
|
| 469 |
+
|
| 470 |
+
|
| 471 |
+
@router.get("/api/cache/list")
|
| 472 |
+
async def list_cache_files(
|
| 473 |
+
cache_type: str = Query("image", alias="type"),
|
| 474 |
+
limit: int = 50,
|
| 475 |
+
offset: int = 0,
|
| 476 |
+
_: bool = Depends(verify_admin_session)
|
| 477 |
+
) -> Dict[str, Any]:
|
| 478 |
+
"""List cached files for admin preview."""
|
| 479 |
+
try:
|
| 480 |
+
cache_type = cache_type.lower()
|
| 481 |
+
if cache_type not in ("image", "video"):
|
| 482 |
+
raise HTTPException(status_code=400, detail={"error": "Invalid cache type", "code": "INVALID_CACHE_TYPE"})
|
| 483 |
+
|
| 484 |
+
if limit < 1:
|
| 485 |
+
limit = 1
|
| 486 |
+
if limit > 200:
|
| 487 |
+
limit = 200
|
| 488 |
+
if offset < 0:
|
| 489 |
+
offset = 0
|
| 490 |
+
|
| 491 |
+
cache_dir = IMAGE_CACHE_DIR if cache_type == "image" else VIDEO_CACHE_DIR
|
| 492 |
+
if not cache_dir.exists():
|
| 493 |
+
return {"success": True, "data": {"total": 0, "items": [], "offset": offset, "limit": limit, "has_more": False}}
|
| 494 |
+
|
| 495 |
+
files = []
|
| 496 |
+
for file_path in cache_dir.iterdir():
|
| 497 |
+
if not file_path.is_file():
|
| 498 |
+
continue
|
| 499 |
+
try:
|
| 500 |
+
stat = file_path.stat()
|
| 501 |
+
except Exception as e:
|
| 502 |
+
logger.warning(f"[Admin] Skip cache file: {file_path.name}, {e}")
|
| 503 |
+
continue
|
| 504 |
+
files.append((file_path, stat.st_mtime, stat.st_size))
|
| 505 |
+
|
| 506 |
+
files.sort(key=lambda item: item[1], reverse=True)
|
| 507 |
+
total = len(files)
|
| 508 |
+
sliced = files[offset:offset + limit]
|
| 509 |
+
|
| 510 |
+
items = [
|
| 511 |
+
{
|
| 512 |
+
"name": file_path.name,
|
| 513 |
+
"size": _format_size(size),
|
| 514 |
+
"size_bytes": size,
|
| 515 |
+
"mtime": int(mtime * 1000),
|
| 516 |
+
"url": f"/images/{file_path.name}",
|
| 517 |
+
"type": cache_type
|
| 518 |
+
}
|
| 519 |
+
for file_path, mtime, size in sliced
|
| 520 |
+
]
|
| 521 |
+
|
| 522 |
+
return {
|
| 523 |
+
"success": True,
|
| 524 |
+
"data": {
|
| 525 |
+
"total": total,
|
| 526 |
+
"items": items,
|
| 527 |
+
"offset": offset,
|
| 528 |
+
"limit": limit,
|
| 529 |
+
"has_more": offset + limit < total
|
| 530 |
+
}
|
| 531 |
+
}
|
| 532 |
+
|
| 533 |
+
except HTTPException:
|
| 534 |
+
raise
|
| 535 |
+
except Exception as e:
|
| 536 |
+
logger.error(f"[Admin] ่ทๅ็ผๅญๅ่กจๅผๅธธ: {e}")
|
| 537 |
+
raise HTTPException(status_code=500, detail={"error": f"่ทๅๅคฑ่ดฅ: {e}", "code": "CACHE_LIST_ERROR"})
|
| 538 |
+
|
| 539 |
+
|
| 540 |
+
@router.post("/api/cache/clear")
|
| 541 |
+
async def clear_cache(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 542 |
+
"""ๆธ
็ๆๆ็ผๅญ"""
|
| 543 |
+
try:
|
| 544 |
+
logger.debug("[Admin] ๆธ
็็ผๅญ")
|
| 545 |
+
|
| 546 |
+
image_count = 0
|
| 547 |
+
video_count = 0
|
| 548 |
+
|
| 549 |
+
# ๆธ
็ๅพ็
|
| 550 |
+
if IMAGE_CACHE_DIR.exists():
|
| 551 |
+
for file_path in IMAGE_CACHE_DIR.iterdir():
|
| 552 |
+
if file_path.is_file():
|
| 553 |
+
try:
|
| 554 |
+
file_path.unlink()
|
| 555 |
+
image_count += 1
|
| 556 |
+
except Exception as e:
|
| 557 |
+
logger.error(f"[Admin] ๅ ้คๅคฑ่ดฅ: {file_path.name}, {e}")
|
| 558 |
+
|
| 559 |
+
# ๆธ
็่ง้ข
|
| 560 |
+
if VIDEO_CACHE_DIR.exists():
|
| 561 |
+
for file_path in VIDEO_CACHE_DIR.iterdir():
|
| 562 |
+
if file_path.is_file():
|
| 563 |
+
try:
|
| 564 |
+
file_path.unlink()
|
| 565 |
+
video_count += 1
|
| 566 |
+
except Exception as e:
|
| 567 |
+
logger.error(f"[Admin] ๅ ้คๅคฑ่ดฅ: {file_path.name}, {e}")
|
| 568 |
+
|
| 569 |
+
total = image_count + video_count
|
| 570 |
+
logger.debug(f"[Admin] ็ผๅญๆธ
็ๅฎๆ: ๅพ็{image_count}, ่ง้ข{video_count}")
|
| 571 |
+
|
| 572 |
+
return {
|
| 573 |
+
"success": True,
|
| 574 |
+
"message": f"ๆๅๆธ
็็ผๅญ๏ผๅ ้คๅพ็ {image_count} ไธช๏ผ่ง้ข {video_count} ไธช๏ผๅ
ฑ {total} ไธชๆไปถ",
|
| 575 |
+
"data": {"deleted_count": total, "image_count": image_count, "video_count": video_count}
|
| 576 |
+
}
|
| 577 |
+
|
| 578 |
+
except Exception as e:
|
| 579 |
+
logger.error(f"[Admin] ๆธ
็็ผๅญๅผๅธธ: {e}")
|
| 580 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆธ
็ๅคฑ่ดฅ: {e}", "code": "CACHE_CLEAR_ERROR"})
|
| 581 |
+
|
| 582 |
+
|
| 583 |
+
@router.post("/api/cache/clear/images")
|
| 584 |
+
async def clear_image_cache(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 585 |
+
"""ๆธ
็ๅพ็็ผๅญ"""
|
| 586 |
+
try:
|
| 587 |
+
logger.debug("[Admin] ๆธ
็ๅพ็็ผๅญ")
|
| 588 |
+
|
| 589 |
+
count = 0
|
| 590 |
+
if IMAGE_CACHE_DIR.exists():
|
| 591 |
+
for file_path in IMAGE_CACHE_DIR.iterdir():
|
| 592 |
+
if file_path.is_file():
|
| 593 |
+
try:
|
| 594 |
+
file_path.unlink()
|
| 595 |
+
count += 1
|
| 596 |
+
except Exception as e:
|
| 597 |
+
logger.error(f"[Admin] ๅ ้คๅคฑ่ดฅ: {file_path.name}, {e}")
|
| 598 |
+
|
| 599 |
+
logger.debug(f"[Admin] ๅพ็็ผๅญๆธ
็ๅฎๆ: {count}ไธช")
|
| 600 |
+
return {"success": True, "message": f"ๆๅๆธ
็ๅพ็็ผๅญ๏ผๅ ้ค {count} ไธชๆไปถ", "data": {"deleted_count": count, "type": "images"}}
|
| 601 |
+
|
| 602 |
+
except Exception as e:
|
| 603 |
+
logger.error(f"[Admin] ๆธ
็ๅพ็็ผๅญๅผๅธธ: {e}")
|
| 604 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆธ
็ๅคฑ่ดฅ: {e}", "code": "IMAGE_CACHE_CLEAR_ERROR"})
|
| 605 |
+
|
| 606 |
+
|
| 607 |
+
@router.post("/api/cache/clear/videos")
|
| 608 |
+
async def clear_video_cache(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 609 |
+
"""ๆธ
็่ง้ข็ผๅญ"""
|
| 610 |
+
try:
|
| 611 |
+
logger.debug("[Admin] ๆธ
็่ง้ข็ผๅญ")
|
| 612 |
+
|
| 613 |
+
count = 0
|
| 614 |
+
if VIDEO_CACHE_DIR.exists():
|
| 615 |
+
for file_path in VIDEO_CACHE_DIR.iterdir():
|
| 616 |
+
if file_path.is_file():
|
| 617 |
+
try:
|
| 618 |
+
file_path.unlink()
|
| 619 |
+
count += 1
|
| 620 |
+
except Exception as e:
|
| 621 |
+
logger.error(f"[Admin] ๅ ้คๅคฑ่ดฅ: {file_path.name}, {e}")
|
| 622 |
+
|
| 623 |
+
logger.debug(f"[Admin] ่ง้ข็ผๅญๆธ
็ๅฎๆ: {count}ไธช")
|
| 624 |
+
return {"success": True, "message": f"ๆๅๆธ
็่ง้ข็ผๅญ๏ผๅ ้ค {count} ไธชๆไปถ", "data": {"deleted_count": count, "type": "videos"}}
|
| 625 |
+
|
| 626 |
+
except Exception as e:
|
| 627 |
+
logger.error(f"[Admin] ๆธ
็่ง้ข็ผๅญๅผๅธธ: {e}")
|
| 628 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆธ
็ๅคฑ่ดฅ: {e}", "code": "VIDEO_CACHE_CLEAR_ERROR"})
|
| 629 |
+
|
| 630 |
+
|
| 631 |
+
@router.get("/api/stats")
|
| 632 |
+
async def get_stats(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 633 |
+
"""่ทๅ็ป่ฎกไฟกๆฏ"""
|
| 634 |
+
try:
|
| 635 |
+
logger.debug("[Admin] ๅผๅง่ทๅ็ป่ฎกไฟกๆฏ")
|
| 636 |
+
|
| 637 |
+
all_tokens = token_manager.get_tokens()
|
| 638 |
+
normal_stats = calculate_token_stats(all_tokens.get(TokenType.NORMAL.value, {}), "normal")
|
| 639 |
+
super_stats = calculate_token_stats(all_tokens.get(TokenType.SUPER.value, {}), "super")
|
| 640 |
+
total = normal_stats["total"] + super_stats["total"]
|
| 641 |
+
|
| 642 |
+
logger.debug(f"[Admin] ็ป่ฎกไฟกๆฏ่ทๅๆๅ - ๆฎ้Token: {normal_stats['total']}, Super Token: {super_stats['total']}, ๆป่ฎก: {total}")
|
| 643 |
+
return {"success": True, "data": {"normal": normal_stats, "super": super_stats, "total": total}}
|
| 644 |
+
|
| 645 |
+
except Exception as e:
|
| 646 |
+
logger.error(f"[Admin] ่ทๅ็ป่ฎกไฟกๆฏๅผๅธธ: {e}")
|
| 647 |
+
raise HTTPException(status_code=500, detail={"error": f"่ทๅๅคฑ่ดฅ: {e}", "code": "STATS_ERROR"})
|
| 648 |
+
|
| 649 |
+
|
| 650 |
+
@router.get("/api/storage/mode")
|
| 651 |
+
async def get_storage_mode(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 652 |
+
"""่ทๅๅญๅจๆจกๅผ"""
|
| 653 |
+
try:
|
| 654 |
+
logger.debug("[Admin] ่ทๅๅญๅจๆจกๅผ")
|
| 655 |
+
import os
|
| 656 |
+
mode = os.getenv("STORAGE_MODE", "file").upper()
|
| 657 |
+
return {"success": True, "data": {"mode": mode}}
|
| 658 |
+
except Exception as e:
|
| 659 |
+
logger.error(f"[Admin] ่ทๅๅญๅจๆจกๅผๅผๅธธ: {e}")
|
| 660 |
+
raise HTTPException(status_code=500, detail={"error": f"่ทๅๅคฑ่ดฅ: {e}", "code": "STORAGE_MODE_ERROR"})
|
| 661 |
+
|
| 662 |
+
|
| 663 |
+
@router.post("/api/tokens/tags")
|
| 664 |
+
async def update_token_tags(request: UpdateTokenTagsRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 665 |
+
"""ๆดๆฐTokenๆ ็ญพ"""
|
| 666 |
+
try:
|
| 667 |
+
logger.debug(f"[Admin] ๆดๆฐTokenๆ ็ญพ: {request.token[:10]}..., {request.tags}")
|
| 668 |
+
|
| 669 |
+
token_type = validate_token_type(request.token_type)
|
| 670 |
+
await token_manager.update_token_tags(request.token, token_type, request.tags)
|
| 671 |
+
|
| 672 |
+
logger.debug(f"[Admin] Tokenๆ ็ญพๆดๆฐๆๅ: {request.token[:10]}...")
|
| 673 |
+
return {"success": True, "message": "ๆ ็ญพๆดๆฐๆๅ", "tags": request.tags}
|
| 674 |
+
|
| 675 |
+
except HTTPException:
|
| 676 |
+
raise
|
| 677 |
+
except Exception as e:
|
| 678 |
+
logger.error(f"[Admin] Tokenๆ ็ญพๆดๆฐๅผๅธธ: {e}")
|
| 679 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆดๆฐๅคฑ่ดฅ: {e}", "code": "UPDATE_TAGS_ERROR"})
|
| 680 |
+
|
| 681 |
+
|
| 682 |
+
@router.get("/api/tokens/tags/all")
|
| 683 |
+
async def get_all_tags(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 684 |
+
"""่ทๅๆๆๆ ็ญพ"""
|
| 685 |
+
try:
|
| 686 |
+
logger.debug("[Admin] ่ทๅๆๆๆ ็ญพ")
|
| 687 |
+
|
| 688 |
+
all_tokens = token_manager.get_tokens()
|
| 689 |
+
tags_set = set()
|
| 690 |
+
|
| 691 |
+
for token_type_data in all_tokens.values():
|
| 692 |
+
for token_data in token_type_data.values():
|
| 693 |
+
tags = token_data.get("tags", [])
|
| 694 |
+
if isinstance(tags, list):
|
| 695 |
+
tags_set.update(tags)
|
| 696 |
+
|
| 697 |
+
tags_list = sorted(list(tags_set))
|
| 698 |
+
logger.debug(f"[Admin] ๆ ็ญพ่ทๅๆๅ: {len(tags_list)}ไธช")
|
| 699 |
+
return {"success": True, "data": tags_list}
|
| 700 |
+
|
| 701 |
+
except Exception as e:
|
| 702 |
+
logger.error(f"[Admin] ่ทๅๆ ็ญพๅผๅธธ: {e}")
|
| 703 |
+
raise HTTPException(status_code=500, detail={"error": f"่ทๅๅคฑ่ดฅ: {e}", "code": "GET_TAGS_ERROR"})
|
| 704 |
+
|
| 705 |
+
|
| 706 |
+
@router.post("/api/tokens/note")
|
| 707 |
+
async def update_token_note(request: UpdateTokenNoteRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 708 |
+
"""ๆดๆฐTokenๅคๆณจ"""
|
| 709 |
+
try:
|
| 710 |
+
logger.debug(f"[Admin] ๆดๆฐTokenๅคๆณจ: {request.token[:10]}...")
|
| 711 |
+
|
| 712 |
+
token_type = validate_token_type(request.token_type)
|
| 713 |
+
await token_manager.update_token_note(request.token, token_type, request.note)
|
| 714 |
+
|
| 715 |
+
logger.debug(f"[Admin] Tokenๅคๆณจๆดๆฐๆๅ: {request.token[:10]}...")
|
| 716 |
+
return {"success": True, "message": "ๅคๆณจๆดๆฐๆๅ", "note": request.note}
|
| 717 |
+
|
| 718 |
+
except HTTPException:
|
| 719 |
+
raise
|
| 720 |
+
except Exception as e:
|
| 721 |
+
logger.error(f"[Admin] Tokenๅคๆณจๆดๆฐๅผๅธธ: {e}")
|
| 722 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆดๆฐๅคฑ่ดฅ: {e}", "code": "UPDATE_NOTE_ERROR"})
|
| 723 |
+
|
| 724 |
+
|
| 725 |
+
@router.post("/api/tokens/test")
|
| 726 |
+
async def test_token(request: TestTokenRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 727 |
+
"""ๆต่ฏTokenๅฏ็จๆง"""
|
| 728 |
+
try:
|
| 729 |
+
logger.debug(f"[Admin] ๆต่ฏToken: {request.token[:10]}...")
|
| 730 |
+
|
| 731 |
+
token_type = validate_token_type(request.token_type)
|
| 732 |
+
auth_token = f"sso-rw={request.token};sso={request.token}"
|
| 733 |
+
|
| 734 |
+
result = await token_manager.check_limits(auth_token, "grok-4-fast")
|
| 735 |
+
|
| 736 |
+
if result:
|
| 737 |
+
logger.debug(f"[Admin] Tokenๆต่ฏๆๅ: {request.token[:10]}...")
|
| 738 |
+
return {
|
| 739 |
+
"success": True,
|
| 740 |
+
"message": "Tokenๆๆ",
|
| 741 |
+
"data": {
|
| 742 |
+
"valid": True,
|
| 743 |
+
"remaining_queries": result.get("remainingTokens", -1),
|
| 744 |
+
"limit": result.get("limit", -1)
|
| 745 |
+
}
|
| 746 |
+
}
|
| 747 |
+
else:
|
| 748 |
+
logger.warning(f"[Admin] Tokenๆต่ฏๅคฑ่ดฅ: {request.token[:10]}...")
|
| 749 |
+
|
| 750 |
+
all_tokens = token_manager.get_tokens()
|
| 751 |
+
token_data = all_tokens.get(token_type.value, {}).get(request.token)
|
| 752 |
+
|
| 753 |
+
if token_data:
|
| 754 |
+
if token_data.get("status") == "expired":
|
| 755 |
+
return {"success": False, "message": "Tokenๅทฒๅคฑๆ", "data": {"valid": False, "error_type": "expired", "error_code": 401}}
|
| 756 |
+
cooldown_remaining_ms = _get_cooldown_remaining_ms(token_data)
|
| 757 |
+
if cooldown_remaining_ms:
|
| 758 |
+
return {
|
| 759 |
+
"success": False,
|
| 760 |
+
"message": "Tokenๅคไบๅทๅดไธญ",
|
| 761 |
+
"data": {
|
| 762 |
+
"valid": False,
|
| 763 |
+
"error_type": "cooldown",
|
| 764 |
+
"error_code": 429,
|
| 765 |
+
"cooldown_remaining": (cooldown_remaining_ms + 999) // 1000
|
| 766 |
+
}
|
| 767 |
+
}
|
| 768 |
+
|
| 769 |
+
exhausted = token_data.get("remainingQueries") == 0
|
| 770 |
+
if token_type == TokenType.SUPER and token_data.get("heavyremainingQueries") == 0:
|
| 771 |
+
exhausted = True
|
| 772 |
+
if exhausted:
|
| 773 |
+
return {
|
| 774 |
+
"success": False,
|
| 775 |
+
"message": "Token้ขๅบฆ่ๅฐฝ",
|
| 776 |
+
"data": {"valid": False, "error_type": "exhausted", "error_code": "quota_exhausted"}
|
| 777 |
+
}
|
| 778 |
+
else:
|
| 779 |
+
return {"success": False, "message": "ๆๅกๅจ่ขซblockๆ็ฝ็ป้่ฏฏ", "data": {"valid": False, "error_type": "blocked", "error_code": 403}}
|
| 780 |
+
else:
|
| 781 |
+
return {"success": False, "message": "Tokenๆฐๆฎๅผๅธธ", "data": {"valid": False, "error_type": "unknown", "error_code": "data_error"}}
|
| 782 |
+
|
| 783 |
+
except HTTPException:
|
| 784 |
+
raise
|
| 785 |
+
except Exception as e:
|
| 786 |
+
logger.error(f"[Admin] Tokenๆต่ฏๅผๅธธ: {e}")
|
| 787 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆต่ฏๅคฑ่ดฅ: {e}", "code": "TEST_TOKEN_ERROR"})
|
| 788 |
+
|
| 789 |
+
|
| 790 |
+
@router.post("/api/tokens/refresh-all")
|
| 791 |
+
async def refresh_all_tokens(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 792 |
+
"""ไธ้ฎๅทๆฐๆๆToken็ๅฉไฝๆฌกๆฐ๏ผๅๅฐๆง่ก๏ผ"""
|
| 793 |
+
import asyncio
|
| 794 |
+
|
| 795 |
+
try:
|
| 796 |
+
# ๆฃๆฅๆฏๅฆๅทฒๅจๅทๆฐ
|
| 797 |
+
progress = token_manager.get_refresh_progress()
|
| 798 |
+
if progress.get("running"):
|
| 799 |
+
return {
|
| 800 |
+
"success": False,
|
| 801 |
+
"message": "ๅทๆฐไปปๅกๆญฃๅจ่ฟ่กไธญ",
|
| 802 |
+
"data": progress
|
| 803 |
+
}
|
| 804 |
+
|
| 805 |
+
# ๅๅฐๅฏๅจๅทๆฐไปปๅก
|
| 806 |
+
logger.info("[Admin] ๅฏๅจๅๅฐๅทๆฐไปปๅก")
|
| 807 |
+
asyncio.create_task(token_manager.refresh_all_limits())
|
| 808 |
+
|
| 809 |
+
# ็ซๅณ่ฟๅ๏ผ่ฎฉๅ็ซฏ่ฝฎ่ฏข่ฟๅบฆ
|
| 810 |
+
return {
|
| 811 |
+
"success": True,
|
| 812 |
+
"message": "ๅทๆฐไปปๅกๅทฒๅฏๅจ",
|
| 813 |
+
"data": {"started": True}
|
| 814 |
+
}
|
| 815 |
+
except Exception as e:
|
| 816 |
+
logger.error(f"[Admin] ๅทๆฐTokenๅผๅธธ: {e}")
|
| 817 |
+
raise HTTPException(status_code=500, detail={"error": f"ๅทๆฐๅคฑ่ดฅ: {e}", "code": "REFRESH_ALL_ERROR"})
|
| 818 |
+
|
| 819 |
+
|
| 820 |
+
@router.get("/api/tokens/refresh-progress")
|
| 821 |
+
async def get_refresh_progress(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 822 |
+
"""่ทๅTokenๅทๆฐ่ฟๅบฆ"""
|
| 823 |
+
try:
|
| 824 |
+
progress = token_manager.get_refresh_progress()
|
| 825 |
+
return {"success": True, "data": progress}
|
| 826 |
+
except Exception as e:
|
| 827 |
+
logger.error(f"[Admin] ่ทๅๅทๆฐ่ฟๅบฆๅผๅธธ: {e}")
|
| 828 |
+
raise HTTPException(status_code=500, detail={"error": f"่ทๅ่ฟๅบฆๅคฑ่ดฅ: {e}"})
|
| 829 |
+
|
| 830 |
+
|
| 831 |
+
@router.get("/api/request-stats")
|
| 832 |
+
async def get_request_stats(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 833 |
+
"""่ทๅ่ฏทๆฑ็ป่ฎกๆฐๆฎ"""
|
| 834 |
+
try:
|
| 835 |
+
stats = request_stats.get_stats(hours=24, days=7)
|
| 836 |
+
return {"success": True, "data": stats}
|
| 837 |
+
except Exception as e:
|
| 838 |
+
logger.error(f"[Admin] ่ทๅ่ฏทๆฑ็ป่ฎกๅผๅธธ: {e}")
|
| 839 |
+
raise HTTPException(status_code=500, detail={"error": f"่ทๅ็ป่ฎกๅคฑ่ดฅ: {e}"})
|
| 840 |
+
|
| 841 |
+
|
| 842 |
+
# === API Key ็ฎก็ ===
|
| 843 |
+
|
| 844 |
+
class AddKeyRequest(BaseModel):
    """Payload for creating a single API key."""
    # Human-readable label for the new key.
    name: str
|
| 846 |
+
|
| 847 |
+
|
| 848 |
+
class UpdateKeyNameRequest(BaseModel):
    """Payload for renaming an existing API key."""
    # Full key string identifying the record.
    key: str
    # New label to store.
    name: str
|
| 851 |
+
|
| 852 |
+
|
| 853 |
+
class UpdateKeyStatusRequest(BaseModel):
    """Payload for enabling/disabling an API key."""
    # Full key string identifying the record.
    key: str
    # True to activate, False to deactivate.
    is_active: bool
|
| 856 |
+
|
| 857 |
+
|
| 858 |
+
class BatchAddKeyRequest(BaseModel):
    """Payload for creating several keys at once."""
    # Label prefix — presumably an index is appended per key; confirm in api_key_manager.
    name_prefix: str
    # Number of keys to create.
    count: int
|
| 861 |
+
|
| 862 |
+
|
| 863 |
+
class BatchDeleteKeyRequest(BaseModel):
    """Payload for deleting several keys at once."""
    # Full key strings to remove.
    keys: List[str]
|
| 865 |
+
|
| 866 |
+
|
| 867 |
+
class BatchUpdateKeyStatusRequest(BaseModel):
    """Payload for bulk-toggling key active state."""
    # Full key strings to update.
    keys: List[str]
    # Target active state applied to every listed key.
    is_active: bool
|
| 870 |
+
|
| 871 |
+
|
| 872 |
+
@router.get("/api/keys")
|
| 873 |
+
async def list_keys(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 874 |
+
"""่ทๅ Key ๅ่กจ"""
|
| 875 |
+
try:
|
| 876 |
+
from app.services.api_keys import api_key_manager
|
| 877 |
+
if not api_key_manager._loaded:
|
| 878 |
+
await api_key_manager.init()
|
| 879 |
+
|
| 880 |
+
keys = api_key_manager.get_all_keys()
|
| 881 |
+
|
| 882 |
+
# ๆทปๅ ้ป่ฎค Key (ๅฏ้)
|
| 883 |
+
global_key = setting.grok_config.get("api_key")
|
| 884 |
+
result_keys = []
|
| 885 |
+
|
| 886 |
+
# ่ฝฌๆขๅนถ่ฑๆ
|
| 887 |
+
for k in keys:
|
| 888 |
+
result_keys.append({
|
| 889 |
+
**k,
|
| 890 |
+
"display_key": f"{k['key'][:6]}...{k['key'][-4:]}"
|
| 891 |
+
})
|
| 892 |
+
|
| 893 |
+
return {
|
| 894 |
+
"success": True,
|
| 895 |
+
"data": result_keys,
|
| 896 |
+
"global_key_set": bool(global_key)
|
| 897 |
+
}
|
| 898 |
+
except Exception as e:
|
| 899 |
+
logger.error(f"[Admin] ่ทๅKeyๅ่กจๅคฑ่ดฅ: {e}")
|
| 900 |
+
raise HTTPException(status_code=500, detail={"error": f"่ทๅๅคฑ่ดฅ: {e}"})
|
| 901 |
+
|
| 902 |
+
|
| 903 |
+
@router.post("/api/keys/add")
|
| 904 |
+
async def add_key(request: AddKeyRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 905 |
+
"""ๆทปๅ Key"""
|
| 906 |
+
try:
|
| 907 |
+
from app.services.api_keys import api_key_manager
|
| 908 |
+
new_key = await api_key_manager.add_key(request.name)
|
| 909 |
+
return {"success": True, "data": new_key, "message": "Keyๅๅปบๆๅ"}
|
| 910 |
+
except Exception as e:
|
| 911 |
+
logger.error(f"[Admin] ๆทปๅ Keyๅคฑ่ดฅ: {e}")
|
| 912 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆทปๅ ๅคฑ่ดฅ: {e}"})
|
| 913 |
+
|
| 914 |
+
|
| 915 |
+
@router.post("/api/keys/delete")
|
| 916 |
+
async def delete_key(request: Dict[str, str], _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 917 |
+
"""ๅ ้ค Key"""
|
| 918 |
+
try:
|
| 919 |
+
from app.services.api_keys import api_key_manager
|
| 920 |
+
key = request.get("key")
|
| 921 |
+
if not key:
|
| 922 |
+
raise ValueError("Key cannot be empty")
|
| 923 |
+
|
| 924 |
+
if await api_key_manager.delete_key(key):
|
| 925 |
+
return {"success": True, "message": "Keyๅ ้คๆๅ"}
|
| 926 |
+
return {"success": False, "message": "Keyไธๅญๅจ"}
|
| 927 |
+
except Exception as e:
|
| 928 |
+
logger.error(f"[Admin] ๅ ้คKeyๅคฑ่ดฅ: {e}")
|
| 929 |
+
raise HTTPException(status_code=500, detail={"error": f"ๅ ้คๅคฑ่ดฅ: {e}"})
|
| 930 |
+
|
| 931 |
+
|
| 932 |
+
@router.post("/api/keys/status")
|
| 933 |
+
async def update_key_status(request: UpdateKeyStatusRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 934 |
+
"""ๆดๆฐ Key ็ถๆ"""
|
| 935 |
+
try:
|
| 936 |
+
from app.services.api_keys import api_key_manager
|
| 937 |
+
if await api_key_manager.update_key_status(request.key, request.is_active):
|
| 938 |
+
return {"success": True, "message": "็ถๆๆดๆฐๆๅ"}
|
| 939 |
+
return {"success": False, "message": "Keyไธๅญๅจ"}
|
| 940 |
+
except Exception as e:
|
| 941 |
+
logger.error(f"[Admin] ๆดๆฐKey็ถๆๅคฑ่ดฅ: {e}")
|
| 942 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆดๆฐๅคฑ่ดฅ: {e}"})
|
| 943 |
+
|
| 944 |
+
|
| 945 |
+
@router.post("/api/keys/name")
|
| 946 |
+
async def update_key_name(request: UpdateKeyNameRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 947 |
+
"""ๆดๆฐ Key ๅคๆณจ"""
|
| 948 |
+
try:
|
| 949 |
+
from app.services.api_keys import api_key_manager
|
| 950 |
+
if await api_key_manager.update_key_name(request.key, request.name):
|
| 951 |
+
return {"success": True, "message": "ๅคๆณจๆดๆฐๆๅ"}
|
| 952 |
+
return {"success": False, "message": "Keyไธๅญๅจ"}
|
| 953 |
+
except Exception as e:
|
| 954 |
+
logger.error(f"[Admin] ๆดๆฐKeyๅคๆณจๅคฑ่ดฅ: {e}")
|
| 955 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆดๆฐๅคฑ่ดฅ: {e}"})
|
| 956 |
+
|
| 957 |
+
|
| 958 |
+
@router.post("/api/keys/batch-add")
|
| 959 |
+
async def batch_add_keys(request: BatchAddKeyRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 960 |
+
"""ๆน้ๆทปๅ Key"""
|
| 961 |
+
try:
|
| 962 |
+
from app.services.api_keys import api_key_manager
|
| 963 |
+
new_keys = await api_key_manager.batch_add_keys(request.name_prefix, request.count)
|
| 964 |
+
return {"success": True, "data": new_keys, "message": f"ๆๅๅๅปบ {len(new_keys)} ไธช Key"}
|
| 965 |
+
except Exception as e:
|
| 966 |
+
logger.error(f"[Admin] ๆน้ๆทปๅ Keyๅคฑ่ดฅ: {e}")
|
| 967 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆน้ๆทปๅ ๅคฑ่ดฅ: {e}"})
|
| 968 |
+
|
| 969 |
+
|
| 970 |
+
@router.post("/api/keys/batch-delete")
|
| 971 |
+
async def batch_delete_keys(request: BatchDeleteKeyRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 972 |
+
"""ๆน้ๅ ้ค Key"""
|
| 973 |
+
try:
|
| 974 |
+
from app.services.api_keys import api_key_manager
|
| 975 |
+
deleted_count = await api_key_manager.batch_delete_keys(request.keys)
|
| 976 |
+
return {"success": True, "message": f"ๆๅๅ ้ค {deleted_count} ไธช Key"}
|
| 977 |
+
except Exception as e:
|
| 978 |
+
logger.error(f"[Admin] ๆน้ๅ ้คKeyๅคฑ่ดฅ: {e}")
|
| 979 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆน้ๅ ้คๅคฑ่ดฅ: {e}"})
|
| 980 |
+
|
| 981 |
+
|
| 982 |
+
@router.post("/api/keys/batch-status")
|
| 983 |
+
async def batch_update_key_status(request: BatchUpdateKeyStatusRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 984 |
+
"""ๆน้ๆดๆฐ Key ็ถๆ"""
|
| 985 |
+
try:
|
| 986 |
+
from app.services.api_keys import api_key_manager
|
| 987 |
+
updated_count = await api_key_manager.batch_update_keys_status(request.keys, request.is_active)
|
| 988 |
+
return {"success": True, "message": f"ๆๅๆดๆฐ {updated_count} ไธช Key ็ถๆ"}
|
| 989 |
+
except Exception as e:
|
| 990 |
+
logger.error(f"[Admin] ๆน้ๆดๆฐKey็ถๆๅคฑ่ดฅ: {e}")
|
| 991 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆน้ๆดๆฐๅคฑ่ดฅ: {e}"})
|
| 992 |
+
|
| 993 |
+
|
| 994 |
+
# === ๆฅๅฟๅฎก่ฎก ===
|
| 995 |
+
|
| 996 |
+
@router.get("/api/logs")
|
| 997 |
+
async def get_logs(limit: int = 1000, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 998 |
+
"""่ทๅ่ฏทๆฑๆฅๅฟ"""
|
| 999 |
+
try:
|
| 1000 |
+
from app.services.request_logger import request_logger
|
| 1001 |
+
logs = await request_logger.get_logs(limit)
|
| 1002 |
+
return {"success": True, "data": logs}
|
| 1003 |
+
except Exception as e:
|
| 1004 |
+
logger.error(f"[Admin] ่ทๅๆฅๅฟๅคฑ่ดฅ: {e}")
|
| 1005 |
+
raise HTTPException(status_code=500, detail={"error": f"่ทๅๅคฑ่ดฅ: {e}"})
|
| 1006 |
+
|
| 1007 |
+
@router.post("/api/logs/clear")
|
| 1008 |
+
async def clear_logs(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
|
| 1009 |
+
"""ๆธ
็ฉบๆฅๅฟ"""
|
| 1010 |
+
try:
|
| 1011 |
+
from app.services.request_logger import request_logger
|
| 1012 |
+
await request_logger.clear_logs()
|
| 1013 |
+
return {"success": True, "message": "ๆฅๅฟๅทฒๆธ
็ฉบ"}
|
| 1014 |
+
except Exception as e:
|
| 1015 |
+
logger.error(f"[Admin] ๆธ
็ฉบๆฅๅฟๅคฑ่ดฅ: {e}")
|
| 1016 |
+
raise HTTPException(status_code=500, detail={"error": f"ๆธ
็ฉบๅคฑ่ดฅ: {e}"})
|
| 1017 |
+
|
app/api/v1/chat.py
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""่ๅคฉAPI่ทฏ็ฑ - OpenAIๅ
ผๅฎน็่ๅคฉๆฅๅฃ"""
|
| 2 |
+
|
| 3 |
+
import time
|
| 4 |
+
from fastapi import APIRouter, Depends, HTTPException, Request
|
| 5 |
+
from typing import Optional, Dict, Any
|
| 6 |
+
from fastapi.responses import StreamingResponse
|
| 7 |
+
|
| 8 |
+
from app.core.auth import auth_manager
|
| 9 |
+
from app.core.exception import GrokApiException
|
| 10 |
+
from app.core.logger import logger
|
| 11 |
+
from app.services.grok.client import GrokClient
|
| 12 |
+
from app.models.openai_schema import OpenAIChatRequest
|
| 13 |
+
from app.services.request_stats import request_stats
|
| 14 |
+
from app.services.request_logger import request_logger
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
router = APIRouter(prefix="/chat", tags=["่ๅคฉ"])
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@router.post("/completions", response_model=None)
|
| 21 |
+
async def chat_completions(
|
| 22 |
+
request: Request,
|
| 23 |
+
body: OpenAIChatRequest,
|
| 24 |
+
auth_info: Dict[str, Any] = Depends(auth_manager.verify)
|
| 25 |
+
):
|
| 26 |
+
"""ๅๅปบ่ๅคฉ่กฅๅ
จ๏ผๆฏๆๆตๅผๅ้ๆตๅผ๏ผ"""
|
| 27 |
+
start_time = time.time()
|
| 28 |
+
model = body.model
|
| 29 |
+
ip = request.client.host
|
| 30 |
+
key_name = auth_info.get("name", "Unknown")
|
| 31 |
+
|
| 32 |
+
status_code = 200
|
| 33 |
+
error_msg = ""
|
| 34 |
+
|
| 35 |
+
try:
|
| 36 |
+
logger.info(f"[Chat] ๆถๅฐ่ๅคฉ่ฏทๆฑ: {key_name} @ {ip}")
|
| 37 |
+
|
| 38 |
+
# ่ฐ็จGrokๅฎขๆท็ซฏ
|
| 39 |
+
result = await GrokClient.openai_to_grok(body.model_dump())
|
| 40 |
+
|
| 41 |
+
# ่ฎฐๅฝๆๅ็ป่ฎก
|
| 42 |
+
await request_stats.record_request(model, success=True)
|
| 43 |
+
|
| 44 |
+
# ๆตๅผๅๅบ
|
| 45 |
+
if body.stream:
|
| 46 |
+
async def stream_wrapper():
|
| 47 |
+
try:
|
| 48 |
+
async for chunk in result:
|
| 49 |
+
yield chunk
|
| 50 |
+
finally:
|
| 51 |
+
# ๆตๅผ็ปๆ่ฎฐๅฝๆฅๅฟ
|
| 52 |
+
duration = time.time() - start_time
|
| 53 |
+
await request_logger.add_log(ip, model, duration, 200, key_name)
|
| 54 |
+
|
| 55 |
+
return StreamingResponse(
|
| 56 |
+
content=stream_wrapper(),
|
| 57 |
+
media_type="text/event-stream",
|
| 58 |
+
headers={
|
| 59 |
+
"Cache-Control": "no-cache",
|
| 60 |
+
"Connection": "keep-alive",
|
| 61 |
+
"X-Accel-Buffering": "no"
|
| 62 |
+
}
|
| 63 |
+
)
|
| 64 |
+
|
| 65 |
+
# ้ๆตๅผๅๅบ - ่ฎฐๅฝๆฅๅฟ
|
| 66 |
+
duration = time.time() - start_time
|
| 67 |
+
await request_logger.add_log(ip, model, duration, 200, key_name)
|
| 68 |
+
return result
|
| 69 |
+
|
| 70 |
+
except GrokApiException as e:
|
| 71 |
+
status_code = e.status_code or 500
|
| 72 |
+
error_msg = str(e)
|
| 73 |
+
await request_stats.record_request(model, success=False)
|
| 74 |
+
logger.error(f"[Chat] Grok API้่ฏฏ: {e} - ่ฏฆๆ
: {e.details}")
|
| 75 |
+
|
| 76 |
+
duration = time.time() - start_time
|
| 77 |
+
await request_logger.add_log(ip, model, duration, status_code, key_name, error=error_msg)
|
| 78 |
+
|
| 79 |
+
raise HTTPException(
|
| 80 |
+
status_code=status_code,
|
| 81 |
+
detail={
|
| 82 |
+
"error": {
|
| 83 |
+
"message": error_msg,
|
| 84 |
+
"type": e.error_code or "grok_api_error",
|
| 85 |
+
"code": e.error_code or "unknown"
|
| 86 |
+
}
|
| 87 |
+
}
|
| 88 |
+
)
|
| 89 |
+
except Exception as e:
|
| 90 |
+
status_code = 500
|
| 91 |
+
error_msg = str(e)
|
| 92 |
+
await request_stats.record_request(model, success=False)
|
| 93 |
+
logger.error(f"[Chat] ๅค็ๅคฑ่ดฅ: {e}")
|
| 94 |
+
|
| 95 |
+
duration = time.time() - start_time
|
| 96 |
+
await request_logger.add_log(ip, model, duration, status_code, key_name, error=error_msg)
|
| 97 |
+
|
| 98 |
+
raise HTTPException(
|
| 99 |
+
status_code=500,
|
| 100 |
+
detail={
|
| 101 |
+
"error": {
|
| 102 |
+
"message": "ๆๅกๅจๅ
้จ้่ฏฏ",
|
| 103 |
+
"type": "internal_error",
|
| 104 |
+
"code": "internal_server_error"
|
| 105 |
+
}
|
| 106 |
+
}
|
| 107 |
+
)
|
app/api/v1/images.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""ๅพ็ๆๅกAPI - ๆไพ็ผๅญ็ๅพ็ๅ่ง้ขๆไปถ"""
|
| 2 |
+
|
| 3 |
+
from fastapi import APIRouter, HTTPException
|
| 4 |
+
from fastapi.responses import FileResponse
|
| 5 |
+
|
| 6 |
+
from app.core.logger import logger
|
| 7 |
+
from app.services.grok.cache import image_cache_service, video_cache_service
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
router = APIRouter()
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
@router.get("/images/{img_path:path}")
|
| 14 |
+
async def get_image(img_path: str):
|
| 15 |
+
"""่ทๅ็ผๅญ็ๅพ็ๆ่ง้ข
|
| 16 |
+
|
| 17 |
+
Args:
|
| 18 |
+
img_path: ๆไปถ่ทฏๅพ๏ผๆ ผๅผ๏ผusers-xxx-generated-xxx-image.jpg๏ผ
|
| 19 |
+
"""
|
| 20 |
+
try:
|
| 21 |
+
# ่ฝฌๆข่ทฏๅพ๏ผ็ญๆจช็บฟโๆๆ ๏ผ
|
| 22 |
+
original_path = "/" + img_path.replace('-', '/')
|
| 23 |
+
|
| 24 |
+
# ๅคๆญ็ฑปๅ
|
| 25 |
+
is_video = any(original_path.lower().endswith(ext) for ext in ['.mp4', '.webm', '.mov', '.avi'])
|
| 26 |
+
|
| 27 |
+
if is_video:
|
| 28 |
+
cache_path = video_cache_service.get_cached(original_path)
|
| 29 |
+
media_type = "video/mp4"
|
| 30 |
+
else:
|
| 31 |
+
cache_path = image_cache_service.get_cached(original_path)
|
| 32 |
+
media_type = "image/jpeg"
|
| 33 |
+
|
| 34 |
+
if cache_path and cache_path.exists():
|
| 35 |
+
logger.debug(f"[MediaAPI] ่ฟๅ็ผๅญ: {cache_path}")
|
| 36 |
+
return FileResponse(
|
| 37 |
+
path=str(cache_path),
|
| 38 |
+
media_type=media_type,
|
| 39 |
+
headers={
|
| 40 |
+
"Cache-Control": "public, max-age=86400",
|
| 41 |
+
"Access-Control-Allow-Origin": "*"
|
| 42 |
+
}
|
| 43 |
+
)
|
| 44 |
+
|
| 45 |
+
# ๆไปถไธๅญๅจ
|
| 46 |
+
logger.warning(f"[MediaAPI] ๆชๆพๅฐ: {original_path}")
|
| 47 |
+
raise HTTPException(status_code=404, detail="File not found")
|
| 48 |
+
|
| 49 |
+
except HTTPException:
|
| 50 |
+
raise
|
| 51 |
+
except Exception as e:
|
| 52 |
+
logger.error(f"[MediaAPI] ่ทๅๅคฑ่ดฅ: {e}")
|
| 53 |
+
raise HTTPException(status_code=500, detail=str(e))
|
app/api/v1/models.py
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""ๆจกๅๆฅๅฃ - OpenAIๅ
ผๅฎน็ๆจกๅๅ่กจ็ซฏ็น"""
|
| 2 |
+
|
| 3 |
+
import time
|
| 4 |
+
from typing import Dict, Any, List, Optional
|
| 5 |
+
from fastapi import APIRouter, HTTPException, Depends
|
| 6 |
+
|
| 7 |
+
from app.models.grok_models import Models
|
| 8 |
+
from app.core.auth import auth_manager
|
| 9 |
+
from app.core.logger import logger
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
router = APIRouter(tags=["ๆจกๅ"])


@router.get("/models")
async def list_models(_: Optional[str] = Depends(auth_manager.verify)) -> Dict[str, Any]:
    """Return the available model list in OpenAI-compatible format."""
    try:
        logger.debug("[Models] ่ฏทๆฑๆจกๅๅ่กจ")

        created = int(time.time())
        entries: List[Dict[str, Any]] = []

        for member in Models:
            mid = member.value
            cfg = Models.get_model_info(mid)
            # One OpenAI-style entry per enum member, with per-model defaults.
            entries.append({
                "id": mid,
                "object": "model",
                "created": created,
                "owned_by": "x-ai",
                "display_name": cfg.get("display_name", mid),
                "description": cfg.get("description", ""),
                "raw_model_path": cfg.get("raw_model_path", f"xai/{mid}"),
                "default_temperature": cfg.get("default_temperature", 1.0),
                "default_max_output_tokens": cfg.get("default_max_output_tokens", 8192),
                "supported_max_output_tokens": cfg.get("supported_max_output_tokens", 131072),
                "default_top_p": cfg.get("default_top_p", 0.95),
            })

        logger.debug(f"[Models] ่ฟๅ {len(entries)} ไธชๆจกๅ")
        return {"object": "list", "data": entries}

    except Exception as e:
        logger.error(f"[Models] ่ทๅๅ่กจๅคฑ่ดฅ: {e}")
        raise HTTPException(
            status_code=500,
            detail={
                "error": {
                    "message": f"Failed to retrieve models: {e}",
                    "type": "internal_error",
                    "code": "model_list_error",
                }
            },
        )
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
@router.get("/models/{model_id}")
async def get_model(model_id: str, _: Optional[str] = Depends(auth_manager.verify)) -> Dict[str, Any]:
    """Return metadata for a single model in OpenAI-compatible format."""
    try:
        logger.debug(f"[Models] ่ฏทๆฑๆจกๅ: {model_id}")

        # Unknown ids get an OpenAI-style 404 envelope.
        if not Models.is_valid_model(model_id):
            logger.warning(f"[Models] ๆจกๅไธๅญๅจ: {model_id}")
            raise HTTPException(
                status_code=404,
                detail={
                    "error": {
                        "message": f"Model '{model_id}' not found",
                        "type": "invalid_request_error",
                        "code": "model_not_found",
                    }
                },
            )

        cfg = Models.get_model_info(model_id)
        payload: Dict[str, Any] = {
            "id": model_id,
            "object": "model",
            "created": int(time.time()),
            "owned_by": "x-ai",
            "display_name": cfg.get("display_name", model_id),
            "description": cfg.get("description", ""),
            "raw_model_path": cfg.get("raw_model_path", f"xai/{model_id}"),
            "default_temperature": cfg.get("default_temperature", 1.0),
            "default_max_output_tokens": cfg.get("default_max_output_tokens", 8192),
            "supported_max_output_tokens": cfg.get("supported_max_output_tokens", 131072),
            "default_top_p": cfg.get("default_top_p", 0.95),
        }

        logger.debug(f"[Models] ่ฟๅๆจกๅ: {model_id}")
        return payload

    except HTTPException:
        # Re-raise our own 404 untouched; only unexpected errors become 500s.
        raise
    except Exception as e:
        logger.error(f"[Models] ่ทๅๆจกๅๅคฑ่ดฅ: {e}")
        raise HTTPException(
            status_code=500,
            detail={
                "error": {
                    "message": f"Failed to retrieve model: {e}",
                    "type": "internal_error",
                    "code": "model_retrieve_error",
                }
            },
        )
|
app/core/auth.py
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""่ฎค่ฏๆจกๅ - APIไปค็้ช่ฏ"""
|
| 2 |
+
|
| 3 |
+
from typing import Optional, Dict
|
| 4 |
+
from fastapi import Depends, HTTPException
|
| 5 |
+
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
| 6 |
+
|
| 7 |
+
from app.core.config import setting
|
| 8 |
+
from app.core.logger import logger
|
| 9 |
+
from app.services.api_keys import api_key_manager
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
# Bearerๅฎๅ
จๆนๆก
|
| 13 |
+
security = HTTPBearer(auto_error=False)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def _build_error(message: str, code: str = "invalid_token") -> dict:
|
| 17 |
+
"""ๆๅปบ่ฎค่ฏ้่ฏฏ"""
|
| 18 |
+
return {
|
| 19 |
+
"error": {
|
| 20 |
+
"message": message,
|
| 21 |
+
"type": "authentication_error",
|
| 22 |
+
"code": code
|
| 23 |
+
}
|
| 24 |
+
}
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class AuthManager:
    """Authentication manager - validates API bearer tokens."""

    @staticmethod
    async def verify(credentials: Optional[HTTPAuthorizationCredentials] = Depends(security)) -> Dict:
        """Validate the bearer token and return its key-info dict."""
        global_key = setting.grok_config.get("api_key")

        # Lazily initialize the key manager on first use.
        if not hasattr(api_key_manager, '_keys'):
            await api_key_manager.init()

        if not credentials:
            # Dev-mode bypass: no global key configured AND no per-user keys.
            if not (global_key or api_key_manager.get_all_keys()):
                logger.debug("[Auth] ๆช่ฎพ็ฝฎAPI_KEY๏ผ่ทณ่ฟ้ช่ฏ")
                return {"key": None, "name": "Anonymous"}

            raise HTTPException(
                status_code=401,
                detail=_build_error("็ผบๅฐ่ฎค่ฏไปค็", "missing_token"),
            )

        token = credentials.credentials
        # Multi-key validation: returns the key's info dict on success.
        key_info = api_key_manager.validate_key(token)
        if key_info:
            return key_info

        raise HTTPException(
            status_code=401,
            detail=_build_error(f"ไปค็ๆ ๆ๏ผ้ฟๅบฆ: {len(token)}", "invalid_token"),
        )


# Global instance
auth_manager = AuthManager()
|
app/core/config.py
ADDED
|
@@ -0,0 +1,203 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""้
็ฝฎ็ฎก็ๅจ - ็ฎก็ๅบ็จ้
็ฝฎ็่ฏปๅ"""
|
| 2 |
+
|
| 3 |
+
import toml
|
| 4 |
+
from pathlib import Path
|
| 5 |
+
from typing import Dict, Any, Optional, Literal
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
# Default values for the [grok] section of data/setting.toml
DEFAULT_GROK = {
    "api_key": "",
    "proxy_url": "",
    "proxy_pool_url": "",
    "proxy_pool_interval": 300,
    "cache_proxy_url": "",
    "cf_clearance": "",
    "x_statsig_id": "",
    "dynamic_statsig": True,
    "filtered_tags": "xaiartifact,xai:tool_usage_card",
    "show_thinking": True,
    "temporary": False,
    "max_upload_concurrency": 20,
    "max_request_concurrency": 100,
    "stream_first_response_timeout": 30,
    "stream_chunk_timeout": 120,
    "stream_total_timeout": 600,
    "retry_status_codes": [401, 429],  # retryable HTTP status codes
}

# Default values for the [global] section of data/setting.toml
DEFAULT_GLOBAL = {
    "base_url": "http://localhost:8000",
    "log_level": "INFO",
    "image_mode": "url",
    "admin_password": "admin",
    "admin_username": "admin",
    "image_cache_max_size_mb": 512,
    "video_cache_max_size_mb": 1024,
    "max_upload_concurrency": 20,  # max concurrent uploads
    "max_request_concurrency": 50,  # max concurrent requests
    "batch_save_interval": 1.0,  # batch-save interval (seconds)
    "batch_save_threshold": 10  # change count that triggers a batch save
}
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class ConfigManager:
    """Configuration manager.

    Loads ``data/setting.toml`` (creating it with defaults on first run),
    normalizes proxy / cf_clearance values on load, and persists updates
    either back to the TOML file or to an injected storage backend.
    """

    def __init__(self) -> None:
        """Ensure the config file exists, then load both sections into memory."""
        # <repo root>/data/setting.toml
        self.config_path: Path = Path(__file__).parents[2] / "data" / "setting.toml"
        # Optional storage backend (injected via set_storage); file is used when None.
        self._storage: Optional[Any] = None
        self._ensure_exists()
        self.global_config: Dict[str, Any] = self.load("global")
        self.grok_config: Dict[str, Any] = self.load("grok")

    def _ensure_exists(self) -> None:
        """Create the config file (and parent directories) if missing."""
        if not self.config_path.exists():
            self.config_path.parent.mkdir(parents=True, exist_ok=True)
            self._create_default()

    def _create_default(self) -> None:
        """Write the default [grok]/[global] sections to disk."""
        default = {"grok": DEFAULT_GROK.copy(), "global": DEFAULT_GLOBAL.copy()}
        with open(self.config_path, "w", encoding="utf-8") as f:
            toml.dump(default, f)

    def _normalize_proxy(self, proxy: str) -> str:
        """Normalize a proxy URL (sock5/socks5 -> socks5h://)."""
        if not proxy:
            return proxy

        proxy = proxy.strip()
        # Fix the common "sock5" typo first, then upgrade socks5 -> socks5h.
        if proxy.startswith("sock5h://"):
            proxy = proxy.replace("sock5h://", "socks5h://", 1)
        if proxy.startswith("sock5://"):
            proxy = proxy.replace("sock5://", "socks5://", 1)
        if proxy.startswith("socks5://"):
            return proxy.replace("socks5://", "socks5h://", 1)
        return proxy

    def _normalize_cf(self, cf: str) -> str:
        """Normalize a CF clearance value (prepend the cookie name if absent)."""
        if cf and not cf.startswith("cf_clearance="):
            return f"cf_clearance={cf}"
        return cf

    def set_storage(self, storage: Any) -> None:
        """Inject a storage backend; save() will then persist through it."""
        self._storage = storage

    def load(self, section: Literal["global", "grok"]) -> Dict[str, Any]:
        """Load one section from the TOML file, normalizing grok values.

        Raises:
            Exception: wraps any read/parse failure (missing section included).
        """
        try:
            with open(self.config_path, "r", encoding="utf-8") as f:
                config = toml.load(f)[section]

            # Normalize grok-specific values on the way in.
            if section == "grok":
                if "proxy_url" in config:
                    config["proxy_url"] = self._normalize_proxy(config["proxy_url"])
                if "cache_proxy_url" in config:
                    config["cache_proxy_url"] = self._normalize_proxy(config["cache_proxy_url"])
                if "cf_clearance" in config:
                    config["cf_clearance"] = self._normalize_cf(config["cf_clearance"])

            return config
        except Exception as e:
            raise Exception(f"[Setting] ้
็ฝฎๅ ่ฝฝๅคฑ่ดฅ: {e}") from e

    async def reload(self) -> None:
        """Re-read both sections from disk into memory."""
        self.global_config = self.load("global")
        self.grok_config = self.load("grok")

    async def _save_file(self, updates: Dict[str, Dict[str, Any]]) -> None:
        """Merge section updates into the TOML file on disk."""
        import aiofiles

        async with aiofiles.open(self.config_path, "r", encoding="utf-8") as f:
            config = toml.loads(await f.read())

        # NOTE(review): updates for a section absent from the file are silently
        # dropped - confirm that is intended.
        for section, data in updates.items():
            if section in config:
                config[section].update(data)

        async with aiofiles.open(self.config_path, "w", encoding="utf-8") as f:
            await f.write(toml.dumps(config))

    async def _save_storage(self, updates: Dict[str, Dict[str, Any]]) -> None:
        """Merge section updates into the injected storage backend."""
        config = await self._storage.load_config()

        for section, data in updates.items():
            if section in config:
                config[section].update(data)

        await self._storage.save_config(config)

    def _prepare_grok(self, grok: Dict[str, Any]) -> Dict[str, Any]:
        """Prepare grok config for persistence (strip the cf_clearance= prefix)."""
        processed = grok.copy()
        if "cf_clearance" in processed:
            cf = processed["cf_clearance"]
            if cf and cf.startswith("cf_clearance="):
                processed["cf_clearance"] = cf.replace("cf_clearance=", "", 1)
        return processed

    async def save(self, global_config: Optional[Dict[str, Any]] = None, grok_config: Optional[Dict[str, Any]] = None) -> None:
        """Persist the given section updates, then reload into memory."""
        updates = {}

        if global_config:
            updates["global"] = global_config
        if grok_config:
            updates["grok"] = self._prepare_grok(grok_config)

        # Persist via storage backend when one is injected, else via the file.
        if self._storage:
            await self._save_storage(updates)
        else:
            await self._save_file(updates)

        await self.reload()

    async def get_proxy_async(self, proxy_type: Literal["service", "cache"] = "service") -> str:
        """Resolve a proxy URL asynchronously (proxy-pool aware).

        Args:
            proxy_type: which proxy to resolve
                - service: service proxy (client/upload)
                - cache: cache proxy (cache)
        """
        from app.core.proxy_pool import proxy_pool

        if proxy_type == "cache":
            cache_proxy = self.grok_config.get("cache_proxy_url", "")
            if cache_proxy:
                return cache_proxy

        # Fall through to the proxy pool (empty string when none configured).
        return await proxy_pool.get_proxy() or ""

    def get_proxy(self, proxy_type: Literal["service", "cache"] = "service") -> str:
        """Resolve a proxy URL synchronously (kept for backward compatibility).

        Args:
            proxy_type: which proxy to resolve
                - service: service proxy (client/upload)
                - cache: cache proxy (cache)
        """
        from app.core.proxy_pool import proxy_pool

        if proxy_type == "cache":
            cache_proxy = self.grok_config.get("cache_proxy_url", "")
            if cache_proxy:
                return cache_proxy

        # For a pool this returns the most recently fetched proxy (no fetch here).
        return proxy_pool.get_current_proxy() or self.grok_config.get("proxy_url", "")


# Global instance
setting = ConfigManager()
|
app/core/exception.py
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""ๅผๅธธๅค็ๅจ - OpenAIๅ
ผๅฎน็้่ฏฏๅๅบ"""
|
| 2 |
+
|
| 3 |
+
from fastapi import Request, status
|
| 4 |
+
from fastapi.responses import JSONResponse
|
| 5 |
+
from fastapi.exceptions import RequestValidationError
|
| 6 |
+
from starlette.exceptions import HTTPException as StarletteHTTPException
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
# HTTP status code -> (OpenAI error type, default message)
HTTP_ERROR_MAP = {
    400: ("invalid_request_error", "่ฏทๆฑๆ ผๅผ้่ฏฏๆ็ผบๅฐๅฟ
ๅกซๅๆฐ"),
    401: ("invalid_request_error", "ไปค็่ฎค่ฏๅคฑ่ดฅ"),
    403: ("permission_error", "ๆฒกๆๆ้่ฎฟ้ฎๆญค่ตๆบ"),
    404: ("invalid_request_error", "่ฏทๆฑ็่ตๆบไธๅญๅจ"),
    429: ("rate_limit_error", "่ฏทๆฑ้ข็่ถ
ๅบ้ๅถ๏ผ่ฏท็จๅๅ่ฏ"),
    500: ("api_error", "ๅ
้จๆๅกๅจ้่ฏฏ"),
    503: ("api_error", "ๆๅกๆๆถไธๅฏ็จ"),
}

# Grok error code -> HTTP status code
GROK_STATUS_MAP = {
    "NO_AUTH_TOKEN": status.HTTP_401_UNAUTHORIZED,
    "INVALID_TOKEN": status.HTTP_401_UNAUTHORIZED,
    "HTTP_ERROR": status.HTTP_502_BAD_GATEWAY,
    "NETWORK_ERROR": status.HTTP_503_SERVICE_UNAVAILABLE,
    "JSON_ERROR": status.HTTP_502_BAD_GATEWAY,
    "API_ERROR": status.HTTP_502_BAD_GATEWAY,
    "STREAM_ERROR": status.HTTP_502_BAD_GATEWAY,
    "NO_RESPONSE": status.HTTP_502_BAD_GATEWAY,
    "TOKEN_SAVE_ERROR": status.HTTP_500_INTERNAL_SERVER_ERROR,
    "NO_AVAILABLE_TOKEN": status.HTTP_503_SERVICE_UNAVAILABLE,
}

# Grok error code -> OpenAI error type
GROK_TYPE_MAP = {
    "NO_AUTH_TOKEN": "authentication_error",
    "INVALID_TOKEN": "authentication_error",
    "HTTP_ERROR": "api_error",
    "NETWORK_ERROR": "api_error",
    "JSON_ERROR": "api_error",
    "API_ERROR": "api_error",
    "STREAM_ERROR": "api_error",
    "NO_RESPONSE": "api_error",
    "TOKEN_SAVE_ERROR": "api_error",
    "NO_AVAILABLE_TOKEN": "api_error",
}
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class GrokApiException(Exception):
    """Business-level exception raised for Grok API failures.

    Carries the machine-readable error_code plus optional details/context
    dicts; status_code falls back to the GROK_STATUS_MAP lookup when not
    given explicitly.
    """

    def __init__(self, message: str, error_code: str = None, details: dict = None, context: dict = None, status_code: int = None):
        super().__init__(message)
        self.message = message
        self.error_code = error_code
        self.details = {} if details is None else details
        self.context = {} if context is None else context
        # Explicit status wins; otherwise derive it from the error code.
        self.status_code = status_code if status_code else GROK_STATUS_MAP.get(error_code)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def build_error_response(message: str, error_type: str, code: str = None, param: str = None) -> dict:
    """Build an OpenAI-compatible error envelope.

    Args:
        message: human-readable error message.
        error_type: OpenAI error category (e.g. "api_error").
        code: optional machine-readable code; omitted when falsy.
        param: optional offending parameter name; omitted when falsy.
    """
    payload = {"message": message, "type": error_type}

    # Optional fields are only emitted when truthy, matching OpenAI's shape.
    if code:
        payload["code"] = code
    if param:
        payload["param"] = param

    return {"error": payload}
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
async def http_exception_handler(_: Request, exc: StarletteHTTPException) -> JSONResponse:
    """Map an HTTPException onto the OpenAI-style error envelope."""
    error_type, fallback = HTTP_ERROR_MAP.get(exc.status_code, ("api_error", str(exc.detail)))
    # Prefer the exception's own detail; fall back to the mapped default.
    detail_text = str(exc.detail) if exc.detail else fallback

    return JSONResponse(
        status_code=exc.status_code,
        content=build_error_response(detail_text, error_type),
    )
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
async def validation_exception_handler(_: Request, exc: RequestValidationError) -> JSONResponse:
    """Turn a request-validation failure into a 400 OpenAI-style error."""
    issues = exc.errors()
    first = issues[0] if issues else {}
    # Surface only the first issue: its location tail and message.
    loc = first.get("loc")
    param = loc[-1] if loc else None
    message = first.get("msg") or "่ฏทๆฑๅๆฐ้่ฏฏ"

    return JSONResponse(
        status_code=status.HTTP_400_BAD_REQUEST,
        content=build_error_response(message, "invalid_request_error", param=param),
    )
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
async def grok_api_exception_handler(_: Request, exc: GrokApiException) -> JSONResponse:
    """Translate a GrokApiException into an OpenAI-style error response."""
    code = exc.error_code
    # Unknown codes degrade to a generic 500 api_error.
    return JSONResponse(
        status_code=GROK_STATUS_MAP.get(code, status.HTTP_500_INTERNAL_SERVER_ERROR),
        content=build_error_response(exc.message, GROK_TYPE_MAP.get(code, "api_error"), code),
    )
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
async def global_exception_handler(_: Request, exc: Exception) -> JSONResponse:
    """Last-resort handler: any uncaught exception becomes a generic 500."""
    body = build_error_response("ๆๅกๅจ้ๅฐๆๅค้่ฏฏ๏ผ่ฏท้่ฏ", "api_error")
    return JSONResponse(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content=body)
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def register_exception_handlers(app) -> None:
    """Attach all custom exception handlers to the FastAPI app."""
    # Registration order mirrors specificity: concrete types before Exception.
    handlers = (
        (StarletteHTTPException, http_exception_handler),
        (RequestValidationError, validation_exception_handler),
        (GrokApiException, grok_api_exception_handler),
        (Exception, global_exception_handler),
    )
    for exc_type, handler in handlers:
        app.add_exception_handler(exc_type, handler)
|
app/core/logger.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""ๅ
จๅฑๆฅๅฟๆจกๅ - ๅไพๆจกๅผ็ๆฅๅฟ็ฎก็ๅจ"""
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
import logging
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from logging.handlers import RotatingFileHandler
|
| 7 |
+
|
| 8 |
+
from app.core.config import setting
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
# Message fragments whose DEBUG records are dropped for "sse_starlette.sse".
FILTER_PATTERNS = [
    "chunk: b'",  # raw SSE byte chunks
    "Got event:",  # SSE events
    "Closing",  # SSE connection close
]


class MCPLogFilter(logging.Filter):
    """Drops high-volume DEBUG records emitted by MCP/SSE libraries."""

    def filter(self, record: logging.LogRecord) -> bool:
        """Return True when the record should be emitted."""
        # Only DEBUG records are ever suppressed.
        if record.levelno != logging.DEBUG:
            return True

        # SSE library: suppress records matching any known noisy fragment.
        if record.name == "sse_starlette.sse":
            message = record.getMessage()
            return all(pattern not in message for pattern in FILTER_PATTERNS)

        # MCP streamable_http: suppress all DEBUG output.
        if "mcp.server.streamable_http" in record.name:
            return False

        return True
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class LoggerManager:
    """Logging manager (singleton).

    Configures the root logger once (console + rotating file handler, both
    passed through MCPLogFilter) and exposes thin level-named wrappers.
    Instantiated at import time as the module-level ``logger``.
    """

    _instance = None      # the singleton instance
    _initialized = False  # guards against re-running __init__

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        """Initialize the logging system (idempotent after first run)."""
        if LoggerManager._initialized:
            return

        # Configuration: logs/ directory at the repo root; level from settings.
        log_dir = Path(__file__).parents[2] / "logs"
        log_dir.mkdir(exist_ok=True)
        log_level = setting.global_config.get("log_level", "INFO").upper()
        log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        log_file = log_dir / "app.log"

        # Root logger carries the configured level.
        self.logger = logging.getLogger()
        self.logger.setLevel(log_level)

        # Avoid attaching duplicate handlers (e.g. when another framework
        # already configured the root logger).
        # NOTE(review): this early return leaves _initialized False - confirm
        # the repeated (cheap) re-entry of __init__ is intended.
        if self.logger.handlers:
            return

        # Shared formatter and noise filter.
        formatter = logging.Formatter(log_format)
        mcp_filter = MCPLogFilter()

        # Console handler (stdout).
        console = logging.StreamHandler(sys.stdout)
        console.setLevel(log_level)
        console.setFormatter(formatter)
        console.addFilter(mcp_filter)

        # Rotating file handler (10MB per file, 5 backups).
        file_handler = RotatingFileHandler(
            log_file, maxBytes=10*1024*1024, backupCount=5, encoding="utf-8"
        )
        file_handler.setLevel(log_level)
        file_handler.setFormatter(formatter)
        file_handler.addFilter(mcp_filter)

        # Attach both handlers to the root logger.
        self.logger.addHandler(console)
        self.logger.addHandler(file_handler)

        # Quiet down noisy third-party libraries.
        self._configure_third_party()

        LoggerManager._initialized = True

    def _configure_third_party(self):
        """Pin log levels for third-party libraries."""
        config = {
            "asyncio": logging.WARNING,
            "uvicorn": logging.INFO,
            "fastapi": logging.INFO,
            "aiomysql": logging.WARNING,
            "mcp": logging.CRITICAL,
            "fastmcp": logging.CRITICAL,
        }

        for name, level in config.items():
            logging.getLogger(name).setLevel(level)

    def debug(self, msg: str) -> None:
        """Log *msg* at DEBUG level."""
        self.logger.debug(msg)

    def info(self, msg: str) -> None:
        """Log *msg* at INFO level."""
        self.logger.info(msg)

    def warning(self, msg: str) -> None:
        """Log *msg* at WARNING level."""
        self.logger.warning(msg)

    def error(self, msg: str) -> None:
        """Log *msg* at ERROR level."""
        self.logger.error(msg)

    def critical(self, msg: str) -> None:
        """Log *msg* at CRITICAL level."""
        self.logger.critical(msg)


# Global instance
logger = LoggerManager()
|
app/core/proxy_pool.py
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""ไปฃ็ๆฑ ็ฎก็ๅจ - ไปURLๅจๆ่ทๅไปฃ็IP"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import aiohttp
|
| 5 |
+
import time
|
| 6 |
+
from typing import Optional, List
|
| 7 |
+
from app.core.logger import logger
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class ProxyPool:
|
| 11 |
+
"""ไปฃ็ๆฑ ็ฎก็ๅจ"""
|
| 12 |
+
|
| 13 |
+
def __init__(self):
|
| 14 |
+
self._pool_url: Optional[str] = None
|
| 15 |
+
self._static_proxy: Optional[str] = None
|
| 16 |
+
self._current_proxy: Optional[str] = None
|
| 17 |
+
self._last_fetch_time: float = 0
|
| 18 |
+
self._fetch_interval: int = 300 # 5ๅ้ๅทๆฐไธๆฌก
|
| 19 |
+
self._enabled: bool = False
|
| 20 |
+
self._lock = asyncio.Lock()
|
| 21 |
+
|
| 22 |
+
def configure(self, proxy_url: str, proxy_pool_url: str = "", proxy_pool_interval: int = 300):
|
| 23 |
+
"""้
็ฝฎไปฃ็ๆฑ
|
| 24 |
+
|
| 25 |
+
Args:
|
| 26 |
+
proxy_url: ้ๆไปฃ็URL๏ผsocks5h://xxx ๆ http://xxx๏ผ
|
| 27 |
+
proxy_pool_url: ไปฃ็ๆฑ API URL๏ผ่ฟๅๅไธชไปฃ็ๅฐๅ
|
| 28 |
+
proxy_pool_interval: ไปฃ็ๆฑ ๅทๆฐ้ด้๏ผ็ง๏ผ
|
| 29 |
+
"""
|
| 30 |
+
self._static_proxy = self._normalize_proxy(proxy_url) if proxy_url else None
|
| 31 |
+
pool_url = proxy_pool_url.strip() if proxy_pool_url else None
|
| 32 |
+
if pool_url and self._looks_like_proxy_url(pool_url):
|
| 33 |
+
normalized_proxy = self._normalize_proxy(pool_url)
|
| 34 |
+
if not self._static_proxy:
|
| 35 |
+
self._static_proxy = normalized_proxy
|
| 36 |
+
logger.warning("[ProxyPool] proxy_pool_url็่ตทๆฅๆฏไปฃ็ๅฐๅ๏ผๅทฒไฝไธบ้ๆไปฃ็ไฝฟ็จ๏ผ่ฏทๆน็จproxy_url")
|
| 37 |
+
else:
|
| 38 |
+
logger.warning("[ProxyPool] proxy_pool_url็่ตทๆฅๆฏไปฃ็ๅฐๅ๏ผๅทฒๅฟฝ็ฅ๏ผไฝฟ็จproxy_url๏ผ")
|
| 39 |
+
pool_url = None
|
| 40 |
+
self._pool_url = pool_url
|
| 41 |
+
self._fetch_interval = proxy_pool_interval
|
| 42 |
+
self._enabled = bool(self._pool_url)
|
| 43 |
+
|
| 44 |
+
if self._enabled:
|
| 45 |
+
logger.info(f"[ProxyPool] ไปฃ็ๆฑ ๅทฒๅฏ็จ: {self._pool_url}, ๅทๆฐ้ด้: {self._fetch_interval}s")
|
| 46 |
+
elif self._static_proxy:
|
| 47 |
+
logger.info(f"[ProxyPool] ไฝฟ็จ้ๆไปฃ็: {self._static_proxy}")
|
| 48 |
+
self._current_proxy = self._static_proxy
|
| 49 |
+
else:
|
| 50 |
+
logger.info("[ProxyPool] ๆช้
็ฝฎไปฃ็")
|
| 51 |
+
|
| 52 |
+
async def get_proxy(self) -> Optional[str]:
|
| 53 |
+
"""่ทๅไปฃ็ๅฐๅ
|
| 54 |
+
|
| 55 |
+
Returns:
|
| 56 |
+
ไปฃ็URLๆNone
|
| 57 |
+
"""
|
| 58 |
+
# ๅฆๆๆชๅฏ็จไปฃ็ๆฑ ๏ผ่ฟๅ้ๆไปฃ็
|
| 59 |
+
if not self._enabled:
|
| 60 |
+
return self._static_proxy
|
| 61 |
+
|
| 62 |
+
# ๆฃๆฅๆฏๅฆ้่ฆๅทๆฐ
|
| 63 |
+
now = time.time()
|
| 64 |
+
if not self._current_proxy or (now - self._last_fetch_time) >= self._fetch_interval:
|
| 65 |
+
async with self._lock:
|
| 66 |
+
# ๅ้ๆฃๆฅ
|
| 67 |
+
if not self._current_proxy or (now - self._last_fetch_time) >= self._fetch_interval:
|
| 68 |
+
await self._fetch_proxy()
|
| 69 |
+
|
| 70 |
+
return self._current_proxy
|
| 71 |
+
|
| 72 |
+
async def force_refresh(self) -> Optional[str]:
|
| 73 |
+
"""ๅผบๅถๅทๆฐไปฃ็๏ผ็จไบ403้่ฏฏ้่ฏ๏ผ
|
| 74 |
+
|
| 75 |
+
Returns:
|
| 76 |
+
ๆฐ็ไปฃ็URLๆNone
|
| 77 |
+
"""
|
| 78 |
+
if not self._enabled:
|
| 79 |
+
return self._static_proxy
|
| 80 |
+
|
| 81 |
+
async with self._lock:
|
| 82 |
+
await self._fetch_proxy()
|
| 83 |
+
|
| 84 |
+
return self._current_proxy
|
| 85 |
+
|
| 86 |
+
async def _fetch_proxy(self):
|
| 87 |
+
"""ไปไปฃ็ๆฑ URL่ทๅๆฐ็ไปฃ็"""
|
| 88 |
+
try:
|
| 89 |
+
logger.debug(f"[ProxyPool] ๆญฃๅจไปไปฃ็ๆฑ ่ทๅๆฐไปฃ็: {self._pool_url}")
|
| 90 |
+
|
| 91 |
+
timeout = aiohttp.ClientTimeout(total=10)
|
| 92 |
+
async with aiohttp.ClientSession(timeout=timeout) as session:
|
| 93 |
+
async with session.get(self._pool_url) as response:
|
| 94 |
+
if response.status == 200:
|
| 95 |
+
proxy_text = await response.text()
|
| 96 |
+
proxy = self._normalize_proxy(proxy_text.strip())
|
| 97 |
+
|
| 98 |
+
# ้ช่ฏไปฃ็ๆ ผๅผ
|
| 99 |
+
if self._validate_proxy(proxy):
|
| 100 |
+
self._current_proxy = proxy
|
| 101 |
+
self._last_fetch_time = time.time()
|
| 102 |
+
logger.info(f"[ProxyPool] ๆๅ่ทๅๆฐไปฃ็: {proxy}")
|
| 103 |
+
else:
|
| 104 |
+
logger.error(f"[ProxyPool] ไปฃ็ๆ ผๅผๆ ๆ: {proxy}")
|
| 105 |
+
# ้็บงๅฐ้ๆไปฃ็
|
| 106 |
+
if not self._current_proxy:
|
| 107 |
+
self._current_proxy = self._static_proxy
|
| 108 |
+
else:
|
| 109 |
+
logger.error(f"[ProxyPool] ่ทๅไปฃ็ๅคฑ่ดฅ: HTTP {response.status}")
|
| 110 |
+
# ้็บงๅฐ้ๆไปฃ็
|
| 111 |
+
if not self._current_proxy:
|
| 112 |
+
self._current_proxy = self._static_proxy
|
| 113 |
+
|
| 114 |
+
except asyncio.TimeoutError:
|
| 115 |
+
logger.error("[ProxyPool] ่ทๅไปฃ็่ถ
ๆถ")
|
| 116 |
+
if not self._current_proxy:
|
| 117 |
+
self._current_proxy = self._static_proxy
|
| 118 |
+
|
| 119 |
+
except Exception as e:
|
| 120 |
+
logger.error(f"[ProxyPool] ่ทๅไปฃ็ๅผๅธธ: {e}")
|
| 121 |
+
# ้็บงๅฐ้ๆไปฃ็
|
| 122 |
+
if not self._current_proxy:
|
| 123 |
+
self._current_proxy = self._static_proxy
|
| 124 |
+
|
| 125 |
+
def _validate_proxy(self, proxy: str) -> bool:
    """Check that a proxy URL uses one of the supported schemes.

    Args:
        proxy: Proxy URL to validate.

    Returns:
        True when the URL starts with a supported protocol prefix.
    """
    if not proxy:
        return False

    # Supported protocols; str.startswith accepts a tuple of prefixes.
    return proxy.startswith(('http://', 'https://', 'socks5://', 'socks5h://'))
def _normalize_proxy(self, proxy: str) -> str:
    """Normalize a proxy URL (sock5/socks5 variants → socks5h://).

    Fixes the common "sock5" typo first, then upgrades plain socks5 to
    socks5h so hostname resolution happens on the proxy side. The rewrite
    rules are applied in order, so "sock5://x" cascades to "socks5h://x",
    matching the original chained-if behavior.
    """
    if not proxy:
        return proxy

    result = proxy.strip()
    rewrites = (
        ("sock5h://", "socks5h://"),
        ("sock5://", "socks5://"),
        ("socks5://", "socks5h://"),
    )
    for wrong, right in rewrites:
        if result.startswith(wrong):
            result = right + result[len(wrong):]
    return result
def _looks_like_proxy_url(self, url: str) -> bool:
    """Heuristic: does this URL look like a proxy address itself?

    Used to avoid mistaking a proxy-pool API endpoint for a proxy; only
    socks-style schemes (including the "sock5" typo forms) count.
    """
    socks_schemes = ("sock5://", "sock5h://", "socks5://", "socks5h://")
    return url.startswith(socks_schemes)
def get_current_proxy(self) -> Optional[str]:
    """Return the proxy currently in use (synchronous accessor).

    Returns:
        The current pool proxy when one is held, otherwise the static
        proxy (which may itself be None).
    """
    if self._current_proxy:
        return self._current_proxy
    return self._static_proxy
+
|
| 169 |
+
# ๅ
จๅฑไปฃ็ๆฑ ๅฎไพ
|
| 170 |
+
proxy_pool = ProxyPool()
|
app/core/storage.py
ADDED
|
@@ -0,0 +1,445 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""ๅญๅจๆฝ่ฑกๅฑ - ๆฏๆๆไปถใMySQLๅRedisๅญๅจ"""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import orjson
|
| 5 |
+
import toml
|
| 6 |
+
import asyncio
|
| 7 |
+
import warnings
|
| 8 |
+
import aiofiles
|
| 9 |
+
from pathlib import Path
|
| 10 |
+
from typing import Dict, Any, Optional, Literal
|
| 11 |
+
from abc import ABC, abstractmethod
|
| 12 |
+
from urllib.parse import urlparse, unquote
|
| 13 |
+
|
| 14 |
+
from app.core.logger import logger
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
StorageMode = Literal["file", "mysql", "redis"]
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class BaseStorage(ABC):
    """Abstract base class defining the storage backend interface.

    Concrete backends (file / MySQL / Redis) persist two documents:
    token data (dict with "sso" / "ssoSuper" keys) and configuration data.
    """

    @abstractmethod
    async def init_db(self) -> None:
        """Initialize the backing store (files, tables, or connections)."""
        pass

    @abstractmethod
    async def load_tokens(self) -> Dict[str, Any]:
        """Load token data and return it as a dict."""
        pass

    @abstractmethod
    async def save_tokens(self, data: Dict[str, Any]) -> None:
        """Persist token data."""
        pass

    @abstractmethod
    async def load_config(self) -> Dict[str, Any]:
        """Load configuration data and return it as a dict."""
        pass

    @abstractmethod
    async def save_config(self, data: Dict[str, Any]) -> None:
        """Persist configuration data."""
        pass
class FileStorage(BaseStorage):
    """File-based storage: tokens in JSON, configuration in TOML.

    Writes are serialized with per-file asyncio locks and performed
    atomically (write to a sibling temp file, then ``os.replace``) so a
    crash or power loss mid-write cannot corrupt the data files.
    """

    def __init__(self, data_dir: Path):
        self.data_dir = data_dir
        self.token_file = data_dir / "token.json"
        self.config_file = data_dir / "setting.toml"
        # Separate locks so token and config writes do not block each other.
        self._token_lock = asyncio.Lock()
        self._config_lock = asyncio.Lock()

    async def init_db(self) -> None:
        """Create the data directory and default files when missing."""
        self.data_dir.mkdir(parents=True, exist_ok=True)

        if not self.token_file.exists():
            await self._write(self.token_file, orjson.dumps({"sso": {}, "ssoSuper": {}}, option=orjson.OPT_INDENT_2).decode())
            logger.info("[Storage] ๅๅปบtokenๆไปถ")

        if not self.config_file.exists():
            default = {
                "global": {"api_keys": [], "admin_username": "admin", "admin_password": "admin"},
                "grok": {"proxy_url": "", "cf_clearance": "", "x_statsig_id": ""}
            }
            await self._write(self.config_file, toml.dumps(default))
            logger.info("[Storage] ๅๅปบ้็ฝฎๆไปถ")

    async def _read(self, path: Path) -> str:
        """Read a whole file as UTF-8 text."""
        async with aiofiles.open(path, "r", encoding="utf-8") as f:
            return await f.read()

    async def _write(self, path: Path, content: str) -> None:
        """Atomically write text to *path*.

        Writes to a sibling ``*.tmp`` file first and then swaps it in with
        ``os.replace`` (atomic on POSIX; same-volume on Windows), so readers
        never observe a truncated/half-written file. The original wrote in
        place, which could corrupt token/config data on a crash mid-write.
        """
        tmp_path = path.with_name(path.name + ".tmp")
        async with aiofiles.open(tmp_path, "w", encoding="utf-8") as f:
            await f.write(content)
        os.replace(tmp_path, path)

    async def _load_json(self, path: Path, default: Dict, lock: asyncio.Lock) -> Dict[str, Any]:
        """Load a JSON file; return *default* when missing or unreadable."""
        try:
            async with lock:
                if not path.exists():
                    return default
                return orjson.loads(await self._read(path))
        except Exception as e:
            logger.error(f"[Storage] ๅ ่ฝฝ{path.name}ๅคฑ่ดฅ: {e}")
            return default

    async def _save_json(self, path: Path, data: Dict, lock: asyncio.Lock) -> None:
        """Save a dict as pretty-printed JSON; re-raise on failure."""
        try:
            async with lock:
                await self._write(path, orjson.dumps(data, option=orjson.OPT_INDENT_2).decode())
        except Exception as e:
            logger.error(f"[Storage] ไฟๅญ{path.name}ๅคฑ่ดฅ: {e}")
            raise

    async def _load_toml(self, path: Path, default: Dict, lock: asyncio.Lock) -> Dict[str, Any]:
        """Load a TOML file; return *default* when missing or unreadable."""
        try:
            async with lock:
                if not path.exists():
                    return default
                return toml.loads(await self._read(path))
        except Exception as e:
            logger.error(f"[Storage] ๅ ่ฝฝ{path.name}ๅคฑ่ดฅ: {e}")
            return default

    async def _save_toml(self, path: Path, data: Dict, lock: asyncio.Lock) -> None:
        """Save a dict as TOML; re-raise on failure."""
        try:
            async with lock:
                await self._write(path, toml.dumps(data))
        except Exception as e:
            logger.error(f"[Storage] ไฟๅญ{path.name}ๅคฑ่ดฅ: {e}")
            raise

    async def load_tokens(self) -> Dict[str, Any]:
        """Load token data (empty sso/ssoSuper maps when absent)."""
        return await self._load_json(self.token_file, {"sso": {}, "ssoSuper": {}}, self._token_lock)

    async def save_tokens(self, data: Dict[str, Any]) -> None:
        """Persist token data to token.json."""
        await self._save_json(self.token_file, data, self._token_lock)

    async def load_config(self) -> Dict[str, Any]:
        """Load configuration data (empty sections when absent)."""
        return await self._load_toml(self.config_file, {"global": {}, "grok": {}}, self._config_lock)

    async def save_config(self, data: Dict[str, Any]) -> None:
        """Persist configuration data to setting.toml."""
        await self._save_toml(self.config_file, data, self._config_lock)
class MysqlStorage(BaseStorage):
    """MySQL-backed storage.

    Uses the local FileStorage as a write-through mirror: reads are always
    served from the files; writes go to the files first and then to MySQL.
    Each table holds the whole document as a single JSON row.
    """

    def __init__(self, database_url: str, data_dir: Path):
        self.database_url = database_url
        self.data_dir = data_dir
        self._pool = None  # aiomysql connection pool, created in init_db()
        self._file = FileStorage(data_dir)  # local file mirror

    async def init_db(self) -> None:
        """Initialize MySQL: ensure DB and tables exist, then reconcile with files.

        Order matters: the database and pool must exist before the file
        mirror is synced, and _sync_data decides the direction (DB wins
        when it has data, otherwise files seed the DB).
        """
        try:
            # Imported lazily so the file backend works without aiomysql installed.
            import aiomysql
            parsed = self._parse_url(self.database_url)
            logger.info(f"[Storage] MySQL: {parsed['user']}@{parsed['host']}:{parsed['port']}/{parsed['db']}")

            await self._create_db(parsed)
            self._pool = await aiomysql.create_pool(
                host=parsed['host'], port=parsed['port'], user=parsed['user'],
                password=parsed['password'], db=parsed['db'], charset="utf8mb4",
                autocommit=True, maxsize=10
            )
            await self._create_tables()
            await self._file.init_db()
            await self._sync_data()

        except ImportError:
            raise Exception("aiomysqlๆชๅฎ่ฃ")
        except Exception as e:
            logger.error(f"[Storage] MySQLๅๅงๅๅคฑ่ดฅ: {e}")
            raise

    def _parse_url(self, url: str) -> Dict[str, Any]:
        """Parse a mysql:// URL into connection parameters.

        Username/password are percent-decoded; the database name defaults
        to "grok2api" when the URL has no path component.
        """
        p = urlparse(url)
        return {
            'user': unquote(p.username) if p.username else "",
            'password': unquote(p.password) if p.password else "",
            'host': p.hostname,
            'port': p.port or 3306,
            'db': p.path[1:] if p.path else "grok2api"
        }

    async def _create_db(self, parsed: Dict) -> None:
        """Create the target database if it does not exist.

        Uses a short-lived single-connection pool because the main pool is
        bound to the (possibly not-yet-existing) database.
        """
        import aiomysql
        pool = await aiomysql.create_pool(
            host=parsed['host'], port=parsed['port'], user=parsed['user'],
            password=parsed['password'], charset="utf8mb4", autocommit=True, maxsize=1
        )

        try:
            async with pool.acquire() as conn:
                async with conn.cursor() as cursor:
                    # MySQL emits a warning (not an error) when the DB exists.
                    with warnings.catch_warnings():
                        warnings.filterwarnings('ignore', message='.*database exists')
                        await cursor.execute(
                            f"CREATE DATABASE IF NOT EXISTS `{parsed['db']}` "
                            f"CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci"
                        )
                    logger.info(f"[Storage] ๆฐๆฎๅบ '{parsed['db']}' ๅฐฑ็ปช")
        finally:
            pool.close()
            await pool.wait_closed()

    async def _create_tables(self) -> None:
        """Create the single-JSON-row token and settings tables if missing."""
        tables = {
            "grok_tokens": """
                CREATE TABLE IF NOT EXISTS grok_tokens (
                    id INT AUTO_INCREMENT PRIMARY KEY,
                    data JSON NOT NULL,
                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
            """,
            "grok_settings": """
                CREATE TABLE IF NOT EXISTS grok_settings (
                    id INT AUTO_INCREMENT PRIMARY KEY,
                    data JSON NOT NULL,
                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
            """
        }

        async with self._pool.acquire() as conn:
            async with conn.cursor() as cursor:
                with warnings.catch_warnings():
                    warnings.filterwarnings('ignore', message='.*already exists')
                    for sql in tables.values():
                        await cursor.execute(sql)
                logger.info("[Storage] MySQL่กจๅฐฑ็ปช")

    async def _sync_data(self) -> None:
        """Reconcile DB and local files at startup.

        When the DB has a row, it overwrites the local file; when the DB is
        empty, non-empty file data seeds the DB. Failures are logged and
        tolerated (startup continues on file data).
        """
        try:
            for table, key in [("grok_tokens", "sso"), ("grok_settings", "global")]:
                data = await self._load_db(table)
                if data:
                    # DB wins: push the DB document down into the file mirror.
                    if table == "grok_tokens":
                        await self._file.save_tokens(data)
                    else:
                        await self._file.save_config(data)
                    logger.info(f"[Storage] {table.split('_')[1]}ๆฐๆฎๅทฒไปDBๅๆญฅ")
                else:
                    # DB empty: seed it from the file only if the file has content.
                    file_data = await (self._file.load_tokens() if table == "grok_tokens" else self._file.load_config())
                    if file_data.get(key) or (table == "grok_tokens" and file_data.get("ssoSuper")):
                        await self._save_db(table, file_data)
                        logger.info(f"[Storage] {table.split('_')[1]}ๆฐๆฎๅทฒๅๅงๅๅฐDB")
        except Exception as e:
            logger.warning(f"[Storage] ๅๆญฅๅคฑ่ดฅ: {e}")

    async def _load_db(self, table: str) -> Optional[Dict]:
        """Load the newest JSON row from *table*; None when empty or on error.

        *table* is always one of the internal constants above, so the
        f-string interpolation is not an injection risk.
        """
        try:
            async with self._pool.acquire() as conn:
                async with conn.cursor() as cursor:
                    await cursor.execute(f"SELECT data FROM {table} ORDER BY id DESC LIMIT 1")
                    result = await cursor.fetchone()
                    return orjson.loads(result[0]) if result else None
        except Exception as e:
            logger.error(f"[Storage] ๅ ่ฝฝ{table}ๅคฑ่ดฅ: {e}")
            return None

    async def _save_db(self, table: str, data: Dict) -> None:
        """Upsert the single JSON row in *table* (update newest, else insert)."""
        try:
            async with self._pool.acquire() as conn:
                async with conn.cursor() as cursor:
                    json_data = orjson.dumps(data).decode()
                    await cursor.execute(f"SELECT id FROM {table} ORDER BY id DESC LIMIT 1")
                    result = await cursor.fetchone()

                    if result:
                        await cursor.execute(f"UPDATE {table} SET data = %s WHERE id = %s", (json_data, result[0]))
                    else:
                        await cursor.execute(f"INSERT INTO {table} (data) VALUES (%s)", (json_data,))
        except Exception as e:
            logger.error(f"[Storage] ไฟๅญ{table}ๅคฑ่ดฅ: {e}")
            raise

    async def load_tokens(self) -> Dict[str, Any]:
        """Load tokens from the local file mirror (DB is write-only after sync)."""
        return await self._file.load_tokens()

    async def save_tokens(self, data: Dict[str, Any]) -> None:
        """Write-through save: file first, then DB."""
        await self._file.save_tokens(data)
        await self._save_db("grok_tokens", data)

    async def load_config(self) -> Dict[str, Any]:
        """Load configuration from the local file mirror."""
        return await self._file.load_config()

    async def save_config(self, data: Dict[str, Any]) -> None:
        """Write-through save: file first, then DB."""
        await self._file.save_config(data)
        await self._save_db("grok_settings", data)

    async def close(self) -> None:
        """Close the connection pool and wait for connections to drain."""
        if self._pool:
            self._pool.close()
            await self._pool.wait_closed()
            logger.info("[Storage] MySQLๅทฒๅ ณ้ญ")
class RedisStorage(BaseStorage):
    """Redis-backed storage.

    Mirrors MysqlStorage's design: FileStorage is the read path / local
    mirror; writes go to both the files and two Redis string keys
    ("grok:tokens", "grok:settings") holding whole JSON documents.
    """

    def __init__(self, redis_url: str, data_dir: Path):
        self.redis_url = redis_url
        self.data_dir = data_dir
        self._redis = None  # redis.asyncio client, created in init_db()
        self._file = FileStorage(data_dir)  # local file mirror

    async def init_db(self) -> None:
        """Connect to Redis, verify with PING, then reconcile with files."""
        try:
            # Imported lazily so the file backend works without redis installed.
            import redis.asyncio as aioredis
            parsed = urlparse(self.redis_url)
            # Redis DB index comes from the URL path ("/2" -> db 2), default 0.
            db = int(parsed.path.lstrip('/')) if parsed.path and parsed.path != '/' else 0
            logger.info(f"[Storage] Redis: {parsed.hostname}:{parsed.port or 6379}/{db}")

            self._redis = aioredis.Redis.from_url(
                self.redis_url, encoding="utf-8", decode_responses=True
            )

            await self._redis.ping()
            logger.info(f"[Storage] Redis่ฟๆฅๆๅ")

            await self._file.init_db()
            await self._sync_data()

        except ImportError:
            raise Exception("redisๆชๅฎ่ฃ")
        except Exception as e:
            logger.error(f"[Storage] Redisๅๅงๅๅคฑ่ดฅ: {e}")
            raise

    async def _sync_data(self) -> None:
        """Reconcile Redis and local files at startup.

        Redis wins when the key exists; otherwise non-empty file data seeds
        Redis. Failures are logged and tolerated.
        """
        try:
            for key, file_func, key_name in [
                ("grok:tokens", self._file.load_tokens, "sso"),
                ("grok:settings", self._file.load_config, "global")
            ]:
                data = await self._redis.get(key)
                if data:
                    # Redis wins: push its document down into the file mirror.
                    parsed = orjson.loads(data)
                    if key == "grok:tokens":
                        await self._file.save_tokens(parsed)
                    else:
                        await self._file.save_config(parsed)
                    logger.info(f"[Storage] {key.split(':')[1]}ๆฐๆฎๅทฒไปRedisๅๆญฅ")
                else:
                    # Redis empty: seed it from the file only if the file has content.
                    file_data = await file_func()
                    if file_data.get(key_name) or (key == "grok:tokens" and file_data.get("ssoSuper")):
                        await self._redis.set(key, orjson.dumps(file_data).decode())
                        logger.info(f"[Storage] {key.split(':')[1]}ๆฐๆฎๅทฒๅๅงๅๅฐRedis")
        except Exception as e:
            logger.warning(f"[Storage] ๅๆญฅๅคฑ่ดฅ: {e}")

    async def _save_redis(self, key: str, data: Dict) -> None:
        """Serialize *data* to JSON and SET it at *key*; re-raise on failure."""
        try:
            await self._redis.set(key, orjson.dumps(data).decode())
        except Exception as e:
            logger.error(f"[Storage] ไฟๅญRedisๅคฑ่ดฅ: {e}")
            raise

    async def load_tokens(self) -> Dict[str, Any]:
        """Load tokens from the local file mirror."""
        return await self._file.load_tokens()

    async def save_tokens(self, data: Dict[str, Any]) -> None:
        """Write-through save: file first, then Redis."""
        await self._file.save_tokens(data)
        await self._save_redis("grok:tokens", data)

    async def load_config(self) -> Dict[str, Any]:
        """Load configuration from the local file mirror."""
        return await self._file.load_config()

    async def save_config(self, data: Dict[str, Any]) -> None:
        """Write-through save: file first, then Redis."""
        await self._file.save_config(data)
        await self._save_redis("grok:settings", data)

    async def close(self) -> None:
        """Close the Redis client.

        NOTE(review): redis-py >= 5 deprecates ``close()`` in favor of
        ``aclose()`` for async clients — confirm the pinned redis version.
        """
        if self._redis:
            await self._redis.close()
            logger.info("[Storage] Redisๅทฒๅ ณ้ญ")
class StorageManager:
    """Singleton storage manager.

    Chooses the backend from the STORAGE_MODE environment variable
    ("file" / "mysql" / "redis"; anything else falls back to file) and
    exposes it via get_storage().
    """

    _instance: Optional['StorageManager'] = None
    _storage: Optional[BaseStorage] = None
    _initialized: bool = False

    def __new__(cls):
        # Classic singleton: every StorageManager() returns the same object.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    async def init(self) -> None:
        """Initialize the storage backend (idempotent).

        Raises:
            ValueError: when mysql/redis mode is selected without DATABASE_URL.
        """
        if self._initialized:
            return

        mode = os.getenv("STORAGE_MODE", "file").lower()
        url = os.getenv("DATABASE_URL", "")
        data_dir = Path(__file__).parents[2] / "data"

        if mode in ("mysql", "redis"):
            if not url:
                raise ValueError(f"{mode.upper()}ๆจกๅผ้่ฆDATABASE_URL")
            backend_cls = MysqlStorage if mode == "mysql" else RedisStorage
            self._storage = backend_cls(url, data_dir)
        else:
            # BUGFIX: the original resolved unknown modes to FileStorage but
            # still called it as FileStorage(url, data_dir) -> TypeError.
            # Unknown modes now fall back to file storage cleanly.
            mode = "file"
            self._storage = FileStorage(data_dir)

        await self._storage.init_db()
        self._initialized = True
        logger.info(f"[Storage] ไฝฟ็จ{mode}ๆจกๅผ")

    def get_storage(self) -> BaseStorage:
        """Return the active storage backend.

        Raises:
            RuntimeError: when init() has not completed yet.
        """
        if not self._initialized or not self._storage:
            raise RuntimeError("StorageManagerๆชๅๅงๅ")
        return self._storage

    async def close(self) -> None:
        """Close the backend if it supports close() (MySQL/Redis do, file does not)."""
        if self._storage and hasattr(self._storage, 'close'):
            await self._storage.close()
# ๅ
จๅฑๅฎไพ
|
| 445 |
+
storage_manager = StorageManager()
|
app/models/grok_models.py
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Grok ๆจกๅ้
็ฝฎๅๆไธพๅฎไน"""
|
| 2 |
+
|
| 3 |
+
from enum import Enum
|
| 4 |
+
from typing import Dict, Any, Tuple
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
# Model configuration.
# Each entry maps a public model name to:
#   grok_model:        (internal Grok model name, mode constant) tuple sent upstream
#   rate_limit_model:  model name used for rate-limit accounting
#   cost:              billing metadata (type / multiplier / human description)
#   requires_super:    whether a Super token is required to use the model
#   display_name / description / raw_model_path: presentation metadata
#   default_* / supported_max_output_tokens:     generation defaults and caps
#   is_video_model:    optional flag for image/video generation models
_MODEL_CONFIG: Dict[str, Dict[str, Any]] = {
    "grok-3-fast": {
        "grok_model": ("grok-3", "MODEL_MODE_FAST"),
        "rate_limit_model": "grok-3",
        "cost": {"type": "low_cost", "multiplier": 1, "description": "่ฎก1ๆฌก่ฐ็จ"},
        "requires_super": False,
        "display_name": "Grok 3 Fast",
        "description": "Fast and efficient Grok 3 model",
        "raw_model_path": "xai/grok-3",
        "default_temperature": 1.0,
        "default_max_output_tokens": 8192,
        "supported_max_output_tokens": 131072,
        "default_top_p": 0.95
    },
    "grok-4-fast": {
        "grok_model": ("grok-4-mini-thinking-tahoe", "MODEL_MODE_GROK_4_MINI_THINKING"),
        "rate_limit_model": "grok-4-mini-thinking-tahoe",
        "cost": {"type": "low_cost", "multiplier": 1, "description": "่ฎก1ๆฌก่ฐ็จ"},
        "requires_super": False,
        "display_name": "Grok 4 Fast",
        "description": "Fast version of Grok 4 with mini thinking capabilities",
        "raw_model_path": "xai/grok-4-mini-thinking-tahoe",
        "default_temperature": 1.0,
        "default_max_output_tokens": 8192,
        "supported_max_output_tokens": 131072,
        "default_top_p": 0.95
    },
    "grok-4-fast-expert": {
        "grok_model": ("grok-4-mini-thinking-tahoe", "MODEL_MODE_EXPERT"),
        "rate_limit_model": "grok-4-mini-thinking-tahoe",
        "cost": {"type": "high_cost", "multiplier": 4, "description": "่ฎก4ๆฌก่ฐ็จ"},
        "requires_super": False,
        "display_name": "Grok 4 Fast Expert",
        "description": "Expert mode of Grok 4 Fast with enhanced reasoning",
        "raw_model_path": "xai/grok-4-mini-thinking-tahoe",
        "default_temperature": 1.0,
        "default_max_output_tokens": 32768,
        "supported_max_output_tokens": 131072,
        "default_top_p": 0.95
    },
    "grok-4-expert": {
        "grok_model": ("grok-4", "MODEL_MODE_EXPERT"),
        "rate_limit_model": "grok-4",
        "cost": {"type": "high_cost", "multiplier": 4, "description": "่ฎก4ๆฌก่ฐ็จ"},
        "requires_super": False,
        "display_name": "Grok 4 Expert",
        "description": "Full Grok 4 model with expert mode capabilities",
        "raw_model_path": "xai/grok-4",
        "default_temperature": 1.0,
        "default_max_output_tokens": 32768,
        "supported_max_output_tokens": 131072,
        "default_top_p": 0.95
    },
    "grok-4-heavy": {
        "grok_model": ("grok-4-heavy", "MODEL_MODE_HEAVY"),
        "rate_limit_model": "grok-4-heavy",
        "cost": {"type": "independent", "multiplier": 1, "description": "็ฌ็ซ่ฎก่ดน๏ผๅชๆSuper็จๆทๅฏ็จ"},
        "requires_super": True,
        "display_name": "Grok 4 Heavy",
        "description": "Most powerful Grok 4 model with heavy computational capabilities. Requires Super Token for access.",
        "raw_model_path": "xai/grok-4-heavy",
        "default_temperature": 1.0,
        "default_max_output_tokens": 65536,
        "supported_max_output_tokens": 131072,
        "default_top_p": 0.95
    },
    "grok-4.1": {
        "grok_model": ("grok-4-1-non-thinking-w-tool", "MODEL_MODE_GROK_4_1"),
        "rate_limit_model": "grok-4-1-non-thinking-w-tool",
        "cost": {"type": "low_cost", "multiplier": 1, "description": "่ฎก1ๆฌก่ฐ็จ"},
        "requires_super": False,
        "display_name": "Grok 4.1",
        "description": "Latest Grok 4.1 model with tool capabilities",
        "raw_model_path": "xai/grok-4-1-non-thinking-w-tool",
        "default_temperature": 1.0,
        "default_max_output_tokens": 8192,
        "supported_max_output_tokens": 131072,
        "default_top_p": 0.95
    },
    "grok-4.1-thinking": {
        "grok_model": ("grok-4-1-thinking-1108b", "MODEL_MODE_AUTO"),
        "rate_limit_model": "grok-4-1-thinking-1108b",
        # NOTE(review): type is "high_cost" but multiplier is 1 — the other
        # high_cost entries use 4; confirm this is intentional.
        "cost": {"type": "high_cost", "multiplier": 1, "description": "่ฎก1ๆฌก่ฐ็จ"},
        "requires_super": False,
        "display_name": "Grok 4.1 Thinking",
        "description": "Grok 4.1 model with advanced thinking and tool capabilities",
        "raw_model_path": "xai/grok-4-1-thinking-1108b",
        "default_temperature": 1.0,
        "default_max_output_tokens": 32768,
        "supported_max_output_tokens": 131072,
        "default_top_p": 0.95
    },
    "grok-imagine-0.9": {
        "grok_model": ("grok-3", "MODEL_MODE_FAST"),
        "rate_limit_model": "grok-3",
        "cost": {"type": "low_cost", "multiplier": 1, "description": "่ฎก1ๆฌก่ฐ็จ"},
        "requires_super": False,
        "display_name": "Grok Imagine 0.9",
        "description": "Image and video generation model. Supports text-to-image and image-to-video generation.",
        "raw_model_path": "xai/grok-imagine-0.9",
        "default_temperature": 1.0,
        "default_max_output_tokens": 8192,
        "supported_max_output_tokens": 131072,
        "default_top_p": 0.95,
        "is_video_model": True
    }
}
+
|
| 117 |
+
class TokenType(Enum):
    """Token type: normal accounts vs. Super accounts.

    Values match the key names used in the persisted token document.
    """
    NORMAL = "ssoNormal"
    SUPER = "ssoSuper"
| 123 |
+
class Models(Enum):
    """Supported public model names, with lookup helpers over _MODEL_CONFIG."""
    GROK_3_FAST = "grok-3-fast"
    GROK_4_1 = "grok-4.1"
    GROK_4_1_THINKING = "grok-4.1-thinking"
    GROK_4_FAST = "grok-4-fast"
    GROK_4_FAST_EXPERT = "grok-4-fast-expert"
    GROK_4_EXPERT = "grok-4-expert"
    GROK_4_HEAVY = "grok-4-heavy"
    GROK_IMAGINE_0_9 = "grok-imagine-0.9"

    @classmethod
    def get_model_info(cls, model: str) -> Dict[str, Any]:
        """Return the configuration dict for *model* ({} when unknown)."""
        return _MODEL_CONFIG.get(model, {})

    @classmethod
    def is_valid_model(cls, model: str) -> bool:
        """Whether *model* has a configuration entry."""
        return model in _MODEL_CONFIG

    @classmethod
    def to_grok(cls, model: str) -> Tuple[str, str]:
        """Map a public name to Grok's internal model name and mode.

        Returns:
            (internal model name, mode constant) tuple; unknown models pass
            through unchanged with the FAST mode as default.
        """
        entry = _MODEL_CONFIG.get(model)
        if not entry:
            return (model, "MODEL_MODE_FAST")
        return entry["grok_model"]

    @classmethod
    def to_rate_limit(cls, model: str) -> str:
        """Map a public name to its rate-limit model name (identity when unknown)."""
        entry = _MODEL_CONFIG.get(model)
        if not entry:
            return model
        return entry["rate_limit_model"]

    @classmethod
    def get_all_model_names(cls) -> list[str]:
        """Return every configured public model name."""
        return [*_MODEL_CONFIG]
app/models/openai_schema.py
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""OpenAI ่ฏทๆฑ-ๅๅบๆจกๅๅฎไน"""
|
| 2 |
+
|
| 3 |
+
from fastapi import HTTPException
|
| 4 |
+
from typing import Optional, List, Union, Dict, Any
|
| 5 |
+
from pydantic import BaseModel, Field, field_validator
|
| 6 |
+
|
| 7 |
+
from app.models.grok_models import Models
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class OpenAIChatRequest(BaseModel):
    """OpenAI-compatible chat completion request."""

    model: str = Field(..., description="ๆจกๅๅ็งฐ", min_length=1)
    messages: List[Dict[str, Any]] = Field(..., description="ๆถๆฏๅ่กจ", min_length=1)
    stream: bool = Field(False, description="ๆตๅผๅๅบ")
    temperature: Optional[float] = Field(0.7, ge=0, le=2, description="้ๆ ทๆธฉๅบฆ")
    max_tokens: Optional[int] = Field(None, ge=1, le=100000, description="ๆๅคงTokenๆฐ")
    top_p: Optional[float] = Field(1.0, ge=0, le=1, description="้ๆ ทๅๆฐ")

    # BUGFIX: Pydantic v2 requires @field_validator to be the OUTER decorator
    # with @classmethod inside it. The original had @classmethod on top, which
    # prevents the validators from being registered correctly.
    @field_validator('messages')
    @classmethod
    def validate_messages(cls, v):
        """Validate message structure: each item must be a dict with a valid
        role and a content field. Raises HTTPException(400) on violations."""
        if not v:
            raise HTTPException(status_code=400, detail="ๆถๆฏๅ่กจไธ่ฝไธบ็ฉบ")

        for msg in v:
            if not isinstance(msg, dict):
                raise HTTPException(status_code=400, detail="ๆฏไธชๆถๆฏๅฟ ้กปๆฏๅญๅ ธ")
            if 'role' not in msg:
                raise HTTPException(status_code=400, detail="ๆถๆฏ็ผบๅฐ 'role' ๅญๆฎต")
            if 'content' not in msg:
                raise HTTPException(status_code=400, detail="ๆถๆฏ็ผบๅฐ 'content' ๅญๆฎต")
            if msg['role'] not in ['system', 'user', 'assistant']:
                raise HTTPException(
                    status_code=400,
                    detail=f"ๆ ๆ่ง่ฒ '{msg['role']}', ๅฟ ้กปๆฏ system/user/assistant"
                )

        return v

    @field_validator('model')
    @classmethod
    def validate_model(cls, v):
        """Validate the model name against the supported model registry."""
        if not Models.is_valid_model(v):
            supported = Models.get_all_model_names()
            raise HTTPException(
                status_code=400,
                detail=f"ไธๆฏๆ็ๆจกๅ '{v}', ๆฏๆ: {', '.join(supported)}"
            )
        return v
| 55 |
+
class OpenAIChatCompletionMessage(BaseModel):
    """One message inside a non-streaming completion choice."""
    role: str = Field(..., description="่ง่ฒ")  # speaker role, e.g. "assistant"
    content: str = Field(..., description="ๅๅฎน")  # message text
    reference_id: Optional[str] = Field(default=None, description="ๅ่ID")  # upstream reference id, if any
    annotations: Optional[List[str]] = Field(default=None, description="ๆณจ้")  # optional annotation strings
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class OpenAIChatCompletionChoice(BaseModel):
    """One choice entry of a non-streaming completion response."""
    index: int = Field(..., description="็ดขๅผ")  # position in the choices array
    message: OpenAIChatCompletionMessage = Field(..., description="ๆถๆฏ")  # the completed message
    logprobs: Optional[float] = Field(default=None, description="ๅฏนๆฐๆฆ็")  # NOTE(review): OpenAI returns an object here, not a float -- confirm
    finish_reason: str = Field(default="stop", description="ๅฎๆๅๅ ")  # why generation stopped
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class OpenAIChatCompletionResponse(BaseModel):
    """Top-level non-streaming chat completion response."""
    id: str = Field(..., description="ๅๅบID")  # response identifier
    object: str = Field("chat.completion", description="ๅฏน่ฑก็ฑปๅ")  # fixed object type
    created: int = Field(..., description="ๅๅปบๆถ้ดๆณ")  # unix timestamp
    model: str = Field(..., description="ๆจกๅ")  # model that produced the answer
    choices: List[OpenAIChatCompletionChoice] = Field(..., description="้้กน")  # completion choices
    usage: Optional[Dict[str, Any]] = Field(None, description="ไปค็ไฝฟ็จ")  # token accounting, if available
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class OpenAIChatCompletionChunkMessage(BaseModel):
    """Delta fragment of a streaming message."""
    role: str = Field(..., description="่ง่ฒ")  # speaker role
    content: str = Field(..., description="ๅๅฎน")  # incremental text
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
class OpenAIChatCompletionChunkChoice(BaseModel):
    """One choice entry of a streaming chunk."""
    index: int = Field(..., description="็ดขๅผ")  # position in the choices array
    # Delta may be a plain dict (e.g. {}) or a typed fragment.
    delta: Optional[Union[Dict[str, Any], OpenAIChatCompletionChunkMessage]] = Field(
        None, description="Deltaๆฐๆฎ"
    )
    finish_reason: Optional[str] = Field(None, description="ๅฎๆๅๅ ")  # set on the last chunk only
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
class OpenAIChatCompletionChunkResponse(BaseModel):
    """Top-level streaming (SSE) chat completion chunk."""
    id: str = Field(..., description="ๅๅบID")  # response identifier (stable across chunks)
    object: str = Field(default="chat.completion.chunk", description="ๅฏน่ฑก็ฑปๅ")  # fixed object type
    created: int = Field(..., description="ๅๅปบๆถ้ดๆณ")  # unix timestamp
    model: str = Field(..., description="ๆจกๅ")  # model name
    system_fingerprint: Optional[str] = Field(default=None, description="็ณป็ปๆ็บน")  # backend fingerprint, if any
    choices: List[OpenAIChatCompletionChunkChoice] = Field(..., description="้้กน")  # chunk choices
|
app/services/api_keys.py
ADDED
|
@@ -0,0 +1,195 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""API Key ็ฎก็ๅจ - ๅค็จๆทๅฏ้ฅ็ฎก็"""
|
| 2 |
+
|
| 3 |
+
import orjson
|
| 4 |
+
import time
|
| 5 |
+
import secrets
|
| 6 |
+
import asyncio
|
| 7 |
+
from typing import List, Dict, Optional
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
|
| 10 |
+
from app.core.logger import logger
|
| 11 |
+
from app.core.config import setting
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class ApiKeyManager:
    """Singleton manager for multi-user API keys persisted as JSON.

    FIX: every mutating operation now holds ``self._lock`` around both the
    in-memory mutation and the disk write. Previously the lock only guarded
    the write itself, so two concurrent coroutines could interleave a
    mutation with a save and lose a key or serialize an inconsistent file.
    """

    _instance = None  # shared singleton instance

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        # Guard against re-running __init__ on the shared singleton.
        if hasattr(self, '_initialized'):
            return

        # data/api_keys.json relative to the project root.
        self.file_path = Path(__file__).parents[2] / "data" / "api_keys.json"
        self._keys: List[Dict] = []
        self._lock = asyncio.Lock()
        self._loaded = False

        self._initialized = True
        logger.debug(f"[ApiKey] ๅๅงๅๅฎๆ: {self.file_path}")

    async def init(self):
        """Load persisted keys once (idempotent)."""
        if not self._loaded:
            await self._load_data()

    async def _load_data(self):
        """Load API keys from disk; on failure the list stays empty."""
        if self._loaded:
            return

        if not self.file_path.exists():
            self._keys = []
            self._loaded = True
            return

        try:
            async with self._lock:
                if self.file_path.exists():
                    content = await asyncio.to_thread(self.file_path.read_bytes)
                    if content:
                        self._keys = orjson.loads(content)
            self._loaded = True
            logger.debug(f"[ApiKey] ๅ ่ฝฝไบ {len(self._keys)} ไธช API Key")
        except Exception as e:
            logger.error(f"[ApiKey] ๅ ่ฝฝๅคฑ่ดฅ: {e}")
            self._keys = []
            # Mark as loaded even on failure so a later save cannot clobber
            # the on-disk file before a load was ever attempted.
            self._loaded = True

    async def _persist(self):
        """Serialize the key list to disk. Caller MUST hold ``self._lock``.

        Refuses to write before a load attempt, so an unloaded manager
        cannot overwrite the existing file with an empty list. Write errors
        are logged, not raised (best-effort persistence, as before).
        """
        if not self._loaded:
            logger.warning("[ApiKey] ๅฐ่ฏๅจๆฐๆฎๆชๅ ่ฝฝๆถไฟๅญ๏ผๅทฒๅๆถๆไฝไปฅ้ฒ่ฆ็ๆฐๆฎ")
            return
        try:
            self.file_path.parent.mkdir(parents=True, exist_ok=True)
            content = orjson.dumps(self._keys, option=orjson.OPT_INDENT_2)
            await asyncio.to_thread(self.file_path.write_bytes, content)
        except Exception as e:
            logger.error(f"[ApiKey] ไฟๅญๅคฑ่ดฅ: {e}")

    async def _save_data(self):
        """Backward-compatible save entry point (acquires the lock itself)."""
        async with self._lock:
            await self._persist()

    def generate_key(self) -> str:
        """Generate a fresh random key with the conventional 'sk-' prefix."""
        return f"sk-{secrets.token_urlsafe(24)}"

    async def add_key(self, name: str) -> Dict:
        """Create, store and persist one API key; returns the new record."""
        new_key = {
            "key": self.generate_key(),
            "name": name,
            "created_at": int(time.time()),
            "is_active": True
        }
        async with self._lock:
            self._keys.append(new_key)
            await self._persist()
        logger.info(f"[ApiKey] ๆทปๅ ๆฐKey: {name}")
        return new_key

    async def batch_add_keys(self, name_prefix: str, count: int) -> List[Dict]:
        """Create ``count`` keys named '<prefix>-<i>' (bare prefix when count == 1)."""
        created_at = int(time.time())  # one timestamp for the whole batch
        new_keys = [
            {
                "key": self.generate_key(),
                "name": f"{name_prefix}-{i}" if count > 1 else name_prefix,
                "created_at": created_at,
                "is_active": True
            }
            for i in range(1, count + 1)
        ]
        async with self._lock:
            self._keys.extend(new_keys)
            await self._persist()
        logger.info(f"[ApiKey] ๆน้ๆทปๅ {count} ไธช Key, ๅ็ผ: {name_prefix}")
        return new_keys

    async def delete_key(self, key: str) -> bool:
        """Delete one key; returns True when a record was actually removed."""
        async with self._lock:
            before = len(self._keys)
            self._keys = [k for k in self._keys if k["key"] != key]
            removed = len(self._keys) != before
            if removed:
                await self._persist()
        if removed:
            logger.info(f"[ApiKey] ๅ ้คKey: {key[:10]}...")
        return removed

    async def batch_delete_keys(self, keys: List[str]) -> int:
        """Delete several keys at once; returns how many were removed."""
        targets = set(keys)  # O(1) membership instead of O(n) per record
        async with self._lock:
            before = len(self._keys)
            self._keys = [k for k in self._keys if k["key"] not in targets]
            deleted_count = before - len(self._keys)
            if deleted_count > 0:
                await self._persist()
        if deleted_count > 0:
            logger.info(f"[ApiKey] ๆน้ๅ ้ค {deleted_count} ไธช Key")
        return deleted_count

    async def update_key_status(self, key: str, is_active: bool) -> bool:
        """Enable/disable one key; returns True when the key exists."""
        async with self._lock:
            for k in self._keys:
                if k["key"] == key:
                    k["is_active"] = is_active
                    await self._persist()
                    return True
        return False

    async def batch_update_keys_status(self, keys: List[str], is_active: bool) -> int:
        """Set the active flag for several keys; returns how many changed."""
        targets = set(keys)
        updated_count = 0
        async with self._lock:
            for k in self._keys:
                if k["key"] in targets and k["is_active"] != is_active:
                    k["is_active"] = is_active
                    updated_count += 1
            if updated_count > 0:
                await self._persist()
        if updated_count > 0:
            logger.info(f"[ApiKey] ๆน้ๆดๆฐ {updated_count} ไธช Key ็ถๆไธบ: {is_active}")
        return updated_count

    async def update_key_name(self, key: str, name: str) -> bool:
        """Rename one key; returns True when the key exists."""
        async with self._lock:
            for k in self._keys:
                if k["key"] == key:
                    k["name"] = name
                    await self._persist()
                    return True
        return False

    def validate_key(self, key: str) -> Optional[Dict]:
        """Validate a key and return its record (with ``is_admin``), else None."""
        # 1) The globally configured key acts as the default admin key.
        global_key = setting.grok_config.get("api_key")
        if global_key and key == global_key:
            return {
                "key": global_key,
                "name": "้ป่ฎค็ฎก็ๅ",
                "is_active": True,
                "is_admin": True
            }

        # 2) Managed per-user keys (must be active to validate).
        for k in self._keys:
            if k["key"] == key:
                if k["is_active"]:
                    return {**k, "is_admin": False}
                return None

        return None

    def get_all_keys(self) -> List[Dict]:
        """Return the live list of key records (callers must not mutate it)."""
        return self._keys


# Shared singleton instance.
api_key_manager = ApiKeyManager()
|
app/services/grok/cache.py
ADDED
|
@@ -0,0 +1,243 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""็ผๅญๆๅกๆจกๅ - ๆไพๅพ็ๅ่ง้ข็ไธ่ฝฝใ็ผๅญๅๆธ
็ๅ่ฝ"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import base64
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from typing import Optional, Tuple
|
| 7 |
+
from curl_cffi.requests import AsyncSession
|
| 8 |
+
|
| 9 |
+
from app.core.config import setting
|
| 10 |
+
from app.core.logger import logger
|
| 11 |
+
from app.services.grok.statsig import get_dynamic_headers
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# ๅธธ้ๅฎไน
|
| 15 |
+
MIME_TYPES = {
|
| 16 |
+
'.jpg': 'image/jpeg', '.jpeg': 'image/jpeg', '.png': 'image/png',
|
| 17 |
+
'.gif': 'image/gif', '.webp': 'image/webp', '.bmp': 'image/bmp',
|
| 18 |
+
}
|
| 19 |
+
DEFAULT_MIME = 'image/jpeg'
|
| 20 |
+
ASSETS_URL = "https://assets.grok.com"
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class CacheService:
    """Base class for on-disk caches of assets fetched from assets.grok.com.

    Files live flat under data/temp/<cache_type>/; '/' in the remote path is
    replaced by '-'. download() wraps the HTTP fetch in a two-level retry
    scheme (see its docstring).
    """

    def __init__(self, cache_type: str, timeout: float = 30.0):
        # cache_type also selects the "<cache_type>_cache_max_size_mb" config key.
        self.cache_type = cache_type
        self.cache_dir = Path(f"data/temp/{cache_type}")
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.timeout = timeout  # default per-request timeout (seconds)
        self._cleanup_lock = asyncio.Lock()  # serializes cleanup() runs

    def _get_path(self, file_path: str) -> Path:
        """Map a remote file path to its flattened local cache path."""
        return self.cache_dir / file_path.lstrip('/').replace('/', '-')

    def _log(self, level: str, msg: str):
        """Log through `logger` with a per-cache prefix, e.g. "[IMAGECache] ..."."""
        getattr(logger, level)(f"[{self.cache_type.upper()}Cache] {msg}")

    def _build_headers(self, file_path: str, auth_token: str) -> dict:
        """Build browser-like request headers including auth (and cf) cookies."""
        cf = setting.grok_config.get("cf_clearance", "")
        return {
            **get_dynamic_headers(pathname=file_path),
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
            "Sec-Fetch-Dest": "document",
            "Sec-Fetch-Mode": "navigate",
            "Sec-Fetch-Site": "same-site",
            "Sec-Fetch-User": "?1",
            "Upgrade-Insecure-Requests": "1",
            "Referer": "https://grok.com/",
            # NOTE(review): assumes auth_token / cf_clearance are already
            # formatted as cookie pairs -- confirm with the token provider.
            "Cookie": f"{auth_token};{cf}" if cf else auth_token
        }

    async def download(self, file_path: str, auth_token: str, timeout: Optional[float] = None) -> Optional[Path]:
        """Download `file_path` into the cache; return the local path or None.

        Retry scheme:
          * inner while-loop: up to 5 immediate retries on HTTP 403
            (this cache uses one fixed proxy, so 403 is retried in place);
          * outer for-loop: retries for status codes listed in the
            "retry_status_codes" config (default 401/429) and for transport
            exceptions, with a small growing delay.
        """
        cache_path = self._get_path(file_path)
        if cache_path.exists():
            self._log("debug", "ๆไปถๅทฒ็ผๅญ")
            return cache_path

        # Configurable outer-retry status codes (401/429 by default).
        retry_codes = setting.grok_config.get("retry_status_codes", [401, 429])
        MAX_OUTER_RETRY = 3

        for outer_retry in range(MAX_OUTER_RETRY + 1):  # +1: first attempt + retries
            try:
                # Inner 403 retry budget.
                max_403_retries = 5
                retry_403_count = 0

                while retry_403_count <= max_403_retries:
                    proxy = await setting.get_proxy_async("cache")
                    proxies = {"http": proxy, "https": proxy} if proxy else {}

                    # Log proxy/url only once, on the very first attempt.
                    if proxy and outer_retry == 0 and retry_403_count == 0:
                        self._log("debug", f"ไฝฟ็จไปฃ็: {proxy.split('@')[-1] if '@' in proxy else proxy}")

                    async with AsyncSession() as session:
                        url = f"{ASSETS_URL}{file_path}"
                        if outer_retry == 0 and retry_403_count == 0:
                            self._log("debug", f"ไธ่ฝฝ: {url}")

                        response = await session.get(
                            url,
                            headers=self._build_headers(file_path, auth_token),
                            proxies=proxies,
                            timeout=timeout or self.timeout,
                            allow_redirects=True,
                            impersonate="chrome133a"
                        )

                        # 403 -> retry in the inner loop after a short pause.
                        if response.status_code == 403:
                            retry_403_count += 1

                            if retry_403_count <= max_403_retries:
                                self._log("warning", f"้ๅฐ403้่ฏฏ๏ผๆญฃๅจ้่ฏ ({retry_403_count}/{max_403_retries})...")
                                await asyncio.sleep(0.5)
                                continue

                            self._log("error", f"403้่ฏฏ๏ผๅทฒ้่ฏ{retry_403_count-1}ๆฌก๏ผๆพๅผ")
                            return None

                        # Configured status codes escalate to the outer retry loop.
                        if response.status_code in retry_codes:
                            if outer_retry < MAX_OUTER_RETRY:
                                delay = (outer_retry + 1) * 0.1  # back-off: 0.1s, 0.2s, 0.3s
                                self._log("warning", f"้ๅฐ{response.status_code}้่ฏฏ๏ผๅคๅฑ้่ฏ ({outer_retry+1}/{MAX_OUTER_RETRY})๏ผ็ญๅพ{delay}s...")
                                await asyncio.sleep(delay)
                                break  # leave inner loop -> next outer attempt
                            else:
                                self._log("error", f"{response.status_code}้่ฏฏ๏ผๅทฒ้่ฏ{outer_retry}ๆฌก๏ผๆพๅผ")
                                return None

                        response.raise_for_status()
                        await asyncio.to_thread(cache_path.write_bytes, response.content)

                        if outer_retry > 0 or retry_403_count > 0:
                            self._log("info", f"้่ฏๆๅ๏ผ")
                        else:
                            self._log("debug", "็ผๅญๆๅ")

                        # Fire-and-forget size-based cleanup (errors swallowed inside).
                        asyncio.create_task(self._safe_cleanup())
                        return cache_path

            except Exception as e:
                # NOTE(review): this branch gives up one iteration earlier
                # (MAX_OUTER_RETRY - 1) than the status-code branch above --
                # looks unintentional; confirm the intended retry budget.
                if outer_retry < MAX_OUTER_RETRY - 1:
                    self._log("warning", f"ไธ่ฝฝๅผๅธธ: {e}๏ผๅคๅฑ้่ฏ ({outer_retry+1}/{MAX_OUTER_RETRY})...")
                    await asyncio.sleep(0.5)
                    continue

                self._log("error", f"ไธ่ฝฝๅคฑ่ดฅ: {e}๏ผๅทฒ้่ฏ{outer_retry}ๆฌก๏ผ")
                return None

        return None

    def get_cached(self, file_path: str) -> Optional[Path]:
        """Return the cached local path for `file_path`, or None if absent."""
        path = self._get_path(file_path)
        return path if path.exists() else None

    async def _safe_cleanup(self):
        """cleanup() wrapper that never raises (used from create_task)."""
        try:
            await self.cleanup()
        except Exception as e:
            self._log("error", f"ๅๅฐๆธ็ๅคฑ่ดฅ: {e}")

    async def cleanup(self):
        """Evict oldest files until the cache fits the configured size limit."""
        if self._cleanup_lock.locked():
            return  # a cleanup is already running

        async with self._cleanup_lock:
            try:
                max_mb = setting.global_config.get(f"{self.cache_type}_cache_max_size_mb", 500)
                max_bytes = max_mb * 1024 * 1024

                # Collect (path, size, mtime) for every regular file.
                files = [(f, (s := f.stat()).st_size, s.st_mtime)
                         for f in self.cache_dir.glob("*") if f.is_file()]
                total = sum(size for _, size, _ in files)

                if total <= max_bytes:
                    return

                self._log("info", f"ๆธ็็ผๅญ {total/1024/1024:.1f}MB -> {max_mb}MB")

                # Delete oldest-first (by mtime) until under the limit.
                for path, size, _ in sorted(files, key=lambda x: x[2]):
                    if total <= max_bytes:
                        break
                    await asyncio.to_thread(path.unlink)
                    total -= size

                self._log("info", f"ๆธ็ๅฎๆ: {total/1024/1024:.1f}MB")
            except Exception as e:
                self._log("error", f"ๆธ็ๅคฑ่ดฅ: {e}")
| 182 |
+
|
| 183 |
+
|
| 184 |
+
class ImageCache(CacheService):
    """Image cache built on CacheService (30 s download timeout)."""

    def __init__(self):
        super().__init__("image", timeout=30.0)

    async def download_image(self, path: str, token: str) -> Optional[Path]:
        """Fetch an image into the cache and return its local path."""
        return await self.download(path, token)

    @staticmethod
    def to_base64(image_path: Path) -> Optional[str]:
        """Encode a cached image file as a base64 data URI (None on failure)."""
        try:
            if not image_path.exists():
                logger.error(f"[ImageCache] ๆไปถไธๅญๅจ: {image_path}")
                return None

            encoded = base64.b64encode(image_path.read_bytes()).decode()
            mime_type = MIME_TYPES.get(image_path.suffix.lower(), DEFAULT_MIME)
            return f"data:{mime_type};base64,{encoded}"
        except Exception as e:
            logger.error(f"[ImageCache] ่ฝฌๆขๅคฑ่ดฅ: {e}")
            return None

    async def download_base64(self, path: str, token: str) -> Optional[str]:
        """Download an image, return it as a data URI, and drop the temp file."""
        try:
            local_path = await self.download(path, token)
            if local_path is None:
                return None

            encoded = self.to_base64(local_path)

            # Best-effort removal of the temporary cache file.
            try:
                local_path.unlink()
            except Exception as e:
                logger.warning(f"[ImageCache] ๅ ้คไธดๆถๆไปถๅคฑ่ดฅ: {e}")

            return encoded
        except Exception as e:
            logger.error(f"[ImageCache] ไธ่ฝฝbase64ๅคฑ่ดฅ: {e}")
            return None
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
class VideoCache(CacheService):
    """Video cache built on CacheService (longer 60 s download timeout)."""

    def __init__(self):
        super().__init__("video", timeout=60.0)

    async def download_video(self, path: str, token: str) -> Optional[Path]:
        """Fetch a video into the cache and return its local path."""
        return await self.download(path, token)


# Module-level singletons shared across the application.
image_cache_service = ImageCache()
video_cache_service = VideoCache()
|
app/services/grok/client.py
ADDED
|
@@ -0,0 +1,358 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Grok API ๅฎขๆท็ซฏ - ๅค็OpenAIๅฐGrok็่ฏทๆฑ่ฝฌๆขๅๅๅบๅค็"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import orjson
|
| 5 |
+
from typing import Dict, List, Tuple, Any, Optional
|
| 6 |
+
from curl_cffi.requests import AsyncSession as curl_AsyncSession
|
| 7 |
+
|
| 8 |
+
from app.core.config import setting
|
| 9 |
+
from app.core.logger import logger
|
| 10 |
+
from app.models.grok_models import Models
|
| 11 |
+
from app.services.grok.processer import GrokResponseProcessor
|
| 12 |
+
from app.services.grok.statsig import get_dynamic_headers
|
| 13 |
+
from app.services.grok.token import token_manager
|
| 14 |
+
from app.services.grok.upload import ImageUploadManager
|
| 15 |
+
from app.services.grok.create import PostCreateManager
|
| 16 |
+
from app.core.exception import GrokApiException
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
# ๅธธ้
|
| 20 |
+
API_ENDPOINT = "https://grok.com/rest/app-chat/conversations/new"
|
| 21 |
+
TIMEOUT = 120
|
| 22 |
+
BROWSER = "chrome133a"
|
| 23 |
+
MAX_RETRY = 3
|
| 24 |
+
MAX_UPLOADS = 20 # ๆ้ซๅนถๅไธไผ ้ๅถไปฅๆฏๆๆด้ซๅนถๅ
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class GrokClient:
|
| 28 |
+
"""Grok API ๅฎขๆท็ซฏ"""
|
| 29 |
+
|
| 30 |
+
_upload_sem = None # ๅปถ่ฟๅๅงๅ
|
| 31 |
+
|
| 32 |
+
@staticmethod
|
| 33 |
+
def _get_upload_semaphore():
|
| 34 |
+
"""่ทๅไธไผ ไฟกๅท้๏ผๅจๆ้
็ฝฎ๏ผ"""
|
| 35 |
+
if GrokClient._upload_sem is None:
|
| 36 |
+
# ไป้
็ฝฎ่ฏปๅ๏ผๅฆๆไธๅฏ็จๅไฝฟ็จ้ป่ฎคๅผ
|
| 37 |
+
max_concurrency = setting.global_config.get("max_upload_concurrency", MAX_UPLOADS)
|
| 38 |
+
GrokClient._upload_sem = asyncio.Semaphore(max_concurrency)
|
| 39 |
+
logger.debug(f"[Client] ๅๅงๅไธไผ ๅนถๅ้ๅถ: {max_concurrency}")
|
| 40 |
+
return GrokClient._upload_sem
|
| 41 |
+
|
| 42 |
+
@staticmethod
|
| 43 |
+
async def openai_to_grok(request: dict):
|
| 44 |
+
"""่ฝฌๆขOpenAI่ฏทๆฑไธบGrok่ฏทๆฑ"""
|
| 45 |
+
model = request["model"]
|
| 46 |
+
content, images = GrokClient._extract_content(request["messages"])
|
| 47 |
+
stream = request.get("stream", False)
|
| 48 |
+
|
| 49 |
+
# ่ทๅๆจกๅไฟกๆฏ
|
| 50 |
+
info = Models.get_model_info(model)
|
| 51 |
+
grok_model, mode = Models.to_grok(model)
|
| 52 |
+
is_video = info.get("is_video_model", False)
|
| 53 |
+
|
| 54 |
+
# ่ง้ขๆจกๅ้ๅถ
|
| 55 |
+
if is_video and len(images) > 1:
|
| 56 |
+
logger.warning(f"[Client] ่ง้ขๆจกๅไป
ๆฏๆ1ๅผ ๅพ็๏ผๅทฒๆชๅๅ1ๅผ ")
|
| 57 |
+
images = images[:1]
|
| 58 |
+
|
| 59 |
+
return await GrokClient._retry(model, content, images, grok_model, mode, is_video, stream)
|
| 60 |
+
|
| 61 |
+
    @staticmethod
    async def _retry(model: str, content: str, images: List[str], grok_model: str, mode: str, is_video: bool, stream: bool):
        """Run the upload -> (optional post) -> chat pipeline with retries.

        Only GrokApiException with error_code HTTP_ERROR / NO_AVAILABLE_TOKEN
        AND a status code listed in "retry_status_codes" is retried; anything
        else propagates immediately.
        """
        last_err = None

        for i in range(MAX_RETRY):
            try:
                token = await token_manager.get_token(model)
                img_ids, img_uris = await GrokClient._upload(images, token)

                # Video models need an imagine "post" session first.
                post_id = None
                if is_video and img_ids and img_uris:
                    post_id = await GrokClient._create_post(img_ids[0], img_uris[0], token)

                payload = GrokClient._build_payload(content, grok_model, mode, img_ids, img_uris, is_video, post_id)
                return await GrokClient._request(payload, token, model, stream, post_id)

            except GrokApiException as e:
                last_err = e
                # Only these error classes are candidates for a retry.
                if e.error_code not in ["HTTP_ERROR", "NO_AVAILABLE_TOKEN"]:
                    raise

                status = e.context.get("status") if e.context else None
                retry_codes = setting.grok_config.get("retry_status_codes", [401, 429])

                if status not in retry_codes:
                    raise

                # Sleep only when another attempt remains.
                if i < MAX_RETRY - 1:
                    logger.warning(f"[Client] ๅคฑ่ดฅ(็ถๆ:{status}), ้่ฏ {i+1}/{MAX_RETRY}")
                    await asyncio.sleep(0.5)

        raise last_err or GrokApiException("่ฏทๆฑๅคฑ่ดฅ", "REQUEST_ERROR")
|
| 96 |
+
|
| 97 |
+
@staticmethod
|
| 98 |
+
def _extract_content(messages: List[Dict]) -> Tuple[str, List[str]]:
|
| 99 |
+
"""ๆๅๆๆฌๅๅพ็๏ผไฟ็่ง่ฒ็ปๆ"""
|
| 100 |
+
formatted_messages = []
|
| 101 |
+
images = []
|
| 102 |
+
|
| 103 |
+
# ่ง่ฒๆ ๅฐ
|
| 104 |
+
role_map = {
|
| 105 |
+
"system": "็ณป็ป",
|
| 106 |
+
"user": "็จๆท",
|
| 107 |
+
"assistant": "grok"
|
| 108 |
+
}
|
| 109 |
+
|
| 110 |
+
for msg in messages:
|
| 111 |
+
role = msg.get("role", "user")
|
| 112 |
+
content = msg.get("content", "")
|
| 113 |
+
role_prefix = role_map.get(role, role)
|
| 114 |
+
|
| 115 |
+
# ๆๅๆๆฌๅ
ๅฎน
|
| 116 |
+
text_parts = []
|
| 117 |
+
if isinstance(content, list):
|
| 118 |
+
for item in content:
|
| 119 |
+
if item.get("type") == "text":
|
| 120 |
+
text_parts.append(item.get("text", ""))
|
| 121 |
+
elif item.get("type") == "image_url":
|
| 122 |
+
if url := item.get("image_url", {}).get("url"):
|
| 123 |
+
images.append(url)
|
| 124 |
+
else:
|
| 125 |
+
text_parts.append(content)
|
| 126 |
+
|
| 127 |
+
# ๅๅนถ่ฏฅๆถๆฏ็ๆๆฌๅนถๆทปๅ ่ง่ฒๅ็ผ
|
| 128 |
+
msg_text = "".join(text_parts).strip()
|
| 129 |
+
if msg_text:
|
| 130 |
+
formatted_messages.append(f"{role_prefix}๏ผ{msg_text}")
|
| 131 |
+
|
| 132 |
+
# ็จๆข่ก็ฌฆ่ฟๆฅๆๆๆถๆฏ
|
| 133 |
+
return "\n".join(formatted_messages), images
|
| 134 |
+
|
| 135 |
+
@staticmethod
|
| 136 |
+
async def _upload(urls: List[str], token: str) -> Tuple[List[str], List[str]]:
|
| 137 |
+
"""ๅนถๅไธไผ ๅพ็"""
|
| 138 |
+
if not urls:
|
| 139 |
+
return [], []
|
| 140 |
+
|
| 141 |
+
async def upload_limited(url):
|
| 142 |
+
async with GrokClient._get_upload_semaphore():
|
| 143 |
+
return await ImageUploadManager.upload(url, token)
|
| 144 |
+
|
| 145 |
+
results = await asyncio.gather(*[upload_limited(u) for u in urls], return_exceptions=True)
|
| 146 |
+
|
| 147 |
+
ids, uris = [], []
|
| 148 |
+
for url, result in zip(urls, results):
|
| 149 |
+
if isinstance(result, Exception):
|
| 150 |
+
logger.warning(f"[Client] ไธไผ ๅคฑ่ดฅ: {url} - {result}")
|
| 151 |
+
elif isinstance(result, tuple) and len(result) == 2:
|
| 152 |
+
fid, furi = result
|
| 153 |
+
if fid:
|
| 154 |
+
ids.append(fid)
|
| 155 |
+
uris.append(furi)
|
| 156 |
+
|
| 157 |
+
return ids, uris
|
| 158 |
+
|
| 159 |
+
@staticmethod
|
| 160 |
+
async def _create_post(file_id: str, file_uri: str, token: str) -> Optional[str]:
|
| 161 |
+
"""ๅๅปบ่ง้ขไผ่ฏ"""
|
| 162 |
+
try:
|
| 163 |
+
result = await PostCreateManager.create(file_id, file_uri, token)
|
| 164 |
+
if result and result.get("success"):
|
| 165 |
+
return result.get("post_id")
|
| 166 |
+
except Exception as e:
|
| 167 |
+
logger.warning(f"[Client] ๅๅปบไผ่ฏๅคฑ่ดฅ: {e}")
|
| 168 |
+
return None
|
| 169 |
+
|
| 170 |
+
@staticmethod
def _build_payload(content: str, model: str, mode: str, img_ids: List[str], img_uris: List[str], is_video: bool = False, post_id: Optional[str] = None) -> Dict:
    """Build the JSON payload for a chat/generation request.

    Args:
        content: Flattened conversation text to send as the message.
        model: Target model name (used verbatim in the standard payload).
        mode: Mode string placed into ``modelMode``.
        img_ids: Uploaded file attachment ids.
        img_uris: Uploaded file URIs; the first one is used when building
            the video payload without a post id.
        is_video: When True (and at least one image URI exists), build the
            special video-generation payload instead of the standard one.
        post_id: Optional imagine post id; when present the video message
            links to the post page rather than the raw asset URI.

    Returns:
        The request payload dict.
    """
    # Video models get a special payload: the message embeds a link to the
    # source post/asset and the videoGen tool is forced on a fixed model.
    if is_video and img_uris:
        img_msg = f"https://grok.com/imagine/{post_id}" if post_id else f"https://assets.grok.com/post/{img_uris[0]}"
        return {
            "temporary": True,
            "modelName": "grok-3",
            "message": f"{img_msg} {content} --mode=custom",
            "fileAttachments": img_ids,
            "toolOverrides": {"videoGen": True}
        }

    # Standard payload (config decides whether the conversation is temporary).
    return {
        "temporary": setting.grok_config.get("temporary", True),
        "modelName": model,
        "message": content,
        "fileAttachments": img_ids,
        "imageAttachments": [],
        "disableSearch": False,
        "enableImageGeneration": True,
        "returnImageBytes": False,
        "returnRawGrokInXaiRequest": False,
        "enableImageStreaming": True,
        "imageGenerationCount": 2,
        "forceConcise": False,
        "toolOverrides": {},
        "enableSideBySide": True,
        "sendFinalMetadata": True,
        "isReasoning": False,
        "webpageUrls": [],
        "disableTextFollowUps": True,
        "responseMetadata": {"requestModelDetails": {"modelId": model}},
        "disableMemory": False,
        "forceSideBySide": False,
        "modelMode": mode,
        "isAsyncChat": False
    }
|
| 210 |
+
|
| 211 |
+
@staticmethod
async def _request(payload: dict, token: str, model: str, stream: bool, post_id: Optional[str] = None):
    """Send a chat request to the Grok API with two layers of retries.

    Outer layer: up to ``MAX_OUTER_RETRY`` extra attempts when the status
    code is in the configurable retry list (401/429 by default), with a
    short progressive delay. Inner layer: up to 5 retries on 403, each
    forcing a proxy refresh — only active when the proxy pool is enabled.

    Args:
        payload: Request body (see ``_build_payload``).
        token: Auth cookie value; also used for failure bookkeeping.
        model: Model name; ``grok-imagine-0.9`` gets a special Referer.
        stream: When True, return the streaming generator, which takes
            ownership of the session; otherwise the parsed response.
        post_id: Optional imagine post id used to build the Referer.

    Raises:
        GrokApiException: on missing token, fatal HTTP errors, network
            errors, or when all retries are exhausted.
    """
    if not token:
        raise GrokApiException("่ฎค่ฏไปค็็ผบๅคฑ", "NO_AUTH_TOKEN")

    # Outer retry: configurable status codes (401/429 by default).
    retry_codes = setting.grok_config.get("retry_status_codes", [401, 429])
    MAX_OUTER_RETRY = 3

    for outer_retry in range(MAX_OUTER_RETRY + 1):  # +1 so 3 real retries happen
        # Inner retry: 403 with proxy-pool rotation.
        max_403_retries = 5
        retry_403_count = 0

        while retry_403_count <= max_403_retries:
            # Imported lazily — presumably to avoid a circular import at
            # module load; TODO confirm.
            from app.core.proxy_pool import proxy_pool

            # On a 403 retry with the pool enabled, force a fresh proxy.
            # NOTE(review): reads the pool's private _enabled flag.
            if retry_403_count > 0 and proxy_pool._enabled:
                logger.info(f"[Client] 403้่ฏ {retry_403_count}/{max_403_retries}๏ผๅทๆฐไปฃ็...")
                proxy = await proxy_pool.force_refresh()
            else:
                proxy = await setting.get_proxy_async("service")

            proxies = {"http": proxy, "https": proxy} if proxy else None

            # Headers are rebuilt inside the loop so each retry picks up
            # fresh dynamic values.
            headers = GrokClient._build_headers(token)
            if model == "grok-imagine-0.9":
                file_attachments = payload.get("fileAttachments", [])
                ref_id = post_id or (file_attachments[0] if file_attachments else "")
                if ref_id:
                    headers["Referer"] = f"https://grok.com/imagine/{ref_id}"

            # One session per attempt; who closes it depends on the path
            # taken below (stream vs. non-stream vs. error).
            session = curl_AsyncSession(impersonate=BROWSER)
            try:
                response = await session.post(
                    API_ENDPOINT,
                    headers=headers,
                    data=orjson.dumps(payload),
                    timeout=TIMEOUT,
                    stream=True,
                    proxies=proxies
                )

                # Inner 403 retry — only triggered when a proxy pool is in use.
                if response.status_code == 403 and proxy_pool._enabled:
                    retry_403_count += 1
                    if retry_403_count <= max_403_retries:
                        logger.warning(f"[Client] ้ๅฐ403้่ฏฏ๏ผๆญฃๅจ้่ฏ ({retry_403_count}/{max_403_retries})...")
                        await session.close()
                        await asyncio.sleep(0.5)
                        continue
                    # Exhausted: fall through to the generic error handling.
                    logger.error(f"[Client] 403้่ฏฏ๏ผๅทฒ้่ฏ{retry_403_count-1}ๆฌก๏ผๆพๅผ")

                # Configurable retry codes — escalate to the outer retry layer.
                if response.status_code in retry_codes:
                    if outer_retry < MAX_OUTER_RETRY:
                        delay = (outer_retry + 1) * 0.1
                        logger.warning(f"[Client] ้ๅฐ{response.status_code}้่ฏฏ๏ผๅคๅฑ้่ฏ ({outer_retry+1}/{MAX_OUTER_RETRY})๏ผ็ญๅพ{delay}s...")
                        await session.close()
                        await asyncio.sleep(delay)
                        break  # leave the inner loop, proceed to next outer attempt
                    else:
                        logger.error(f"[Client] {response.status_code}้่ฏฏ๏ผๅทฒ้่ฏ{outer_retry}ๆฌก๏ผๆพๅผ")
                        try:
                            GrokClient._handle_error(response, token)  # always raises
                        finally:
                            await session.close()

                # Any remaining non-200 status is fatal for this request.
                if response.status_code != 200:
                    try:
                        GrokClient._handle_error(response, token)  # always raises
                    finally:
                        await session.close()

                # Success — clear the token's failure counter in the background.
                asyncio.create_task(token_manager.reset_failure(token))

                if outer_retry > 0 or retry_403_count > 0:
                    logger.info(f"[Client] ้่ฏๆๅ๏ผ")

                if stream:
                    # Streaming: the iterator owns the session and closes it
                    # when the stream finishes.
                    result = GrokResponseProcessor.process_stream(response, token, session)
                else:
                    # Non-streaming: close the session as soon as the body
                    # has been fully processed.
                    try:
                        result = await GrokResponseProcessor.process_normal(response, token, model)
                    finally:
                        await session.close()

                asyncio.create_task(GrokClient._update_limits(token, model))
                return result

            except Exception as e:
                await session.close()
                # curl_cffi transport failures are wrapped for callers.
                if "RequestsError" in str(type(e)):
                    logger.error(f"[Client] ็ฝ็ป้่ฏฏ: {e}")
                    raise GrokApiException(f"็ฝ็ป้่ฏฏ: {e}", "NETWORK_ERROR") from e
                raise

    raise GrokApiException("่ฏทๆฑๅคฑ่ดฅ๏ผๅทฒ่พพๅฐๆๅคง้่ฏๆฌกๆฐ", "MAX_RETRIES_EXCEEDED")
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
@staticmethod
def _build_headers(token: str) -> Dict[str, str]:
    """Build request headers for the new-conversation endpoint.

    The Cookie is the auth token, optionally suffixed with the
    configured ``cf_clearance`` value.
    """
    request_headers = get_dynamic_headers("/rest/app-chat/conversations/new")
    cf_clearance = setting.grok_config.get("cf_clearance", "")
    if cf_clearance:
        request_headers["Cookie"] = f"{token};{cf_clearance}"
    else:
        request_headers["Cookie"] = token
    return request_headers
|
| 328 |
+
|
| 329 |
+
@staticmethod
def _handle_error(response, token: str):
    """Record a failed HTTP response and raise a GrokApiException.

    Side effects (scheduled as tasks, not awaited — requires a running
    event loop):
      * ``token_manager.record_failure`` with the status and message
      * ``token_manager.apply_cooldown`` for the status

    Raises:
        GrokApiException: always, with code "HTTP_ERROR" and the status
            plus parsed body in the detail payload.
    """
    if response.status_code == 403:
        # Cloudflare block: emit a structured marker instead of the body.
        msg = "ๆจ็IP่ขซๆฆๆช๏ผ่ฏทๅฐ่ฏไปฅไธๆนๆณไนไธ: 1.ๆดๆขIP 2.ไฝฟ็จไปฃ็ 3.้็ฝฎCFๅผ"
        data = {"cf_blocked": True, "status": 403}
        logger.warning(f"[Client] {msg}")
    else:
        try:
            data = response.json()
            msg = str(data)
        except Exception:
            # FIX: was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit; only body-parse failures
            # should fall back to raw text here.
            data = response.text
            msg = data[:200] if data else "ๆช็ฅ้่ฏฏ"

    asyncio.create_task(token_manager.record_failure(token, response.status_code, msg))
    asyncio.create_task(token_manager.apply_cooldown(token, response.status_code))
    raise GrokApiException(
        f"่ฏทๆฑๅคฑ่ดฅ: {response.status_code} - {msg}",
        "HTTP_ERROR",
        {"status": response.status_code, "data": data}
    )
|
| 351 |
+
|
| 352 |
+
@staticmethod
async def _update_limits(token: str, model: str):
    """Refresh the rate-limit state for a token/model pair.

    Failures are logged and swallowed on purpose: limit bookkeeping
    must never break the request that triggered it.
    """
    try:
        await token_manager.check_limits(token, model)
    except Exception as exc:
        logger.error(f"[Client] ๆดๆฐ้ๅถๅคฑ่ดฅ: {exc}")
|
app/services/grok/create.py
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Postๅๅปบ็ฎก็ๅจ - ็จไบ่ง้ข็ๆๅ็ไผ่ฏๅๅปบ"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import orjson
|
| 5 |
+
from typing import Dict, Any, Optional
|
| 6 |
+
from curl_cffi.requests import AsyncSession
|
| 7 |
+
|
| 8 |
+
from app.services.grok.statsig import get_dynamic_headers
|
| 9 |
+
from app.core.exception import GrokApiException
|
| 10 |
+
from app.core.config import setting
|
| 11 |
+
from app.core.logger import logger
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# ๅธธ้
|
| 15 |
+
ENDPOINT = "https://grok.com/rest/media/post/create"
|
| 16 |
+
TIMEOUT = 30
|
| 17 |
+
BROWSER = "chrome133a"
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class PostCreateManager:
    """Creates media "post" records, used as sessions for video generation."""

    @staticmethod
    async def create(file_id: str, file_uri: str, auth_token: str) -> Optional[Dict[str, Any]]:
        """Create a post (session) record for an uploaded media file.

        Args:
            file_id: Id of the uploaded file.
            file_uri: File URI, relative to ``assets.grok.com``.
            auth_token: Auth cookie value.

        Returns:
            A dict with ``post_id``/``file_id``/``file_uri``/``success``/``data``
            on success. NOTE(review): if every outer retry is consumed by a
            retryable status code, control falls out of both loops and this
            coroutine implicitly returns None — callers must handle that.

        Raises:
            GrokApiException: on invalid params, fatal HTTP errors, or any
                unexpected exception (wrapped as CREATE_ERROR).
        """
        # Parameter validation.
        if not file_id or not file_uri:
            raise GrokApiException("ๆไปถIDๆURI็ผบๅคฑ", "INVALID_PARAMS")
        if not auth_token:
            raise GrokApiException("่ฎค่ฏไปค็็ผบๅคฑ", "NO_AUTH_TOKEN")

        try:
            # Request body for the post-create endpoint.
            data = {
                "media_url": f"https://assets.grok.com/{file_uri}",
                "media_type": "MEDIA_POST_TYPE_IMAGE"
            }

            cf = setting.grok_config.get("cf_clearance", "")
            headers = {
                **get_dynamic_headers("/rest/media/post/create"),
                "Cookie": f"{auth_token};{cf}" if cf else auth_token
            }

            # Outer retry: configurable status codes (401/429 by default).
            retry_codes = setting.grok_config.get("retry_status_codes", [401, 429])
            MAX_OUTER_RETRY = 3

            for outer_retry in range(MAX_OUTER_RETRY + 1):  # +1 so 3 real retries happen
                # Inner retry: 403 with proxy-pool rotation.
                max_403_retries = 5
                retry_403_count = 0

                while retry_403_count <= max_403_retries:
                    # Imported lazily — presumably to avoid a circular import
                    # at module load; TODO confirm.
                    from app.core.proxy_pool import proxy_pool

                    # On a 403 retry with the pool enabled, force a fresh proxy.
                    # NOTE(review): reads the pool's private _enabled flag.
                    if retry_403_count > 0 and proxy_pool._enabled:
                        logger.info(f"[PostCreate] 403้่ฏ {retry_403_count}/{max_403_retries}๏ผๅทๆฐไปฃ็...")
                        proxy = await proxy_pool.force_refresh()
                    else:
                        proxy = await setting.get_proxy_async("service")

                    proxies = {"http": proxy, "https": proxy} if proxy else None

                    # One short-lived session per attempt.
                    async with AsyncSession() as session:
                        response = await session.post(
                            ENDPOINT,
                            headers=headers,
                            json=data,
                            impersonate=BROWSER,
                            timeout=TIMEOUT,
                            proxies=proxies
                        )

                    # Inner 403 retry — only when a proxy pool is in use.
                    if response.status_code == 403 and proxy_pool._enabled:
                        retry_403_count += 1

                        if retry_403_count <= max_403_retries:
                            logger.warning(f"[PostCreate] ้ๅฐ403้่ฏฏ๏ผๆญฃๅจ้่ฏ ({retry_403_count}/{max_403_retries})...")
                            await asyncio.sleep(0.5)
                            continue

                        # Inner retries exhausted; fall through to the
                        # generic handling below.
                        logger.error(f"[PostCreate] 403้่ฏฏ๏ผๅทฒ้่ฏ{retry_403_count-1}ๆฌก๏ผๆพๅผ")

                    # Configurable status codes — escalate to the outer retry.
                    if response.status_code in retry_codes:
                        if outer_retry < MAX_OUTER_RETRY:
                            delay = (outer_retry + 1) * 0.1  # progressive delay: 0.1s, 0.2s, 0.3s
                            logger.warning(f"[PostCreate] ้ๅฐ{response.status_code}้่ฏฏ๏ผๅคๅฑ้่ฏ ({outer_retry+1}/{MAX_OUTER_RETRY})๏ผ็ญๅพ{delay}s...")
                            await asyncio.sleep(delay)
                            break  # leave the inner loop, proceed to next outer attempt
                        else:
                            logger.error(f"[PostCreate] {response.status_code}้่ฏฏ๏ผๅทฒ้่ฏ{outer_retry}ๆฌก๏ผๆพๅผ")
                            raise GrokApiException(f"ๅๅปบๅคฑ่ดฅ: {response.status_code}้่ฏฏ", "CREATE_ERROR")

                    if response.status_code == 200:
                        result = response.json()
                        post_id = result.get("post", {}).get("id", "")

                        if outer_retry > 0 or retry_403_count > 0:
                            logger.info(f"[PostCreate] ้่ฏๆๅ๏ผ")

                        logger.debug(f"[PostCreate] ๆๅ๏ผไผ่ฏID: {post_id}")
                        return {
                            "post_id": post_id,
                            "file_id": file_id,
                            "file_uri": file_uri,
                            "success": True,
                            "data": result
                        }

                    # Any other status is fatal for this call.
                    try:
                        error = response.json()
                        msg = f"็ถๆ็ : {response.status_code}, ่ฏฆๆ: {error}"
                    except:
                        msg = f"็ถๆ็ : {response.status_code}, ่ฏฆๆ: {response.text[:200]}"

                    logger.error(f"[PostCreate] ๅคฑ่ดฅ: {msg}")
                    raise GrokApiException(f"ๅๅปบๅคฑ่ดฅ: {msg}", "CREATE_ERROR")

        except GrokApiException:
            raise
        except Exception as e:
            logger.error(f"[PostCreate] ๅผๅธธ: {e}")
            raise GrokApiException(f"ๅๅปบๅผๅธธ: {e}", "CREATE_ERROR") from e
|
app/services/grok/processer.py
ADDED
|
@@ -0,0 +1,430 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Grok API ๅๅบๅค็ๅจ - ๅค็ๆตๅผๅ้ๆตๅผๅๅบ"""
|
| 2 |
+
|
| 3 |
+
import orjson
|
| 4 |
+
import uuid
|
| 5 |
+
import time
|
| 6 |
+
import asyncio
|
| 7 |
+
from typing import AsyncGenerator, Tuple, Any
|
| 8 |
+
|
| 9 |
+
from app.core.config import setting
|
| 10 |
+
from app.core.exception import GrokApiException
|
| 11 |
+
from app.core.logger import logger
|
| 12 |
+
from app.models.openai_schema import (
|
| 13 |
+
OpenAIChatCompletionResponse,
|
| 14 |
+
OpenAIChatCompletionChoice,
|
| 15 |
+
OpenAIChatCompletionMessage,
|
| 16 |
+
OpenAIChatCompletionChunkResponse,
|
| 17 |
+
OpenAIChatCompletionChunkChoice,
|
| 18 |
+
OpenAIChatCompletionChunkMessage
|
| 19 |
+
)
|
| 20 |
+
from app.services.grok.cache import image_cache_service, video_cache_service
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class StreamTimeoutManager:
    """Tracks timeouts for a streaming response.

    Three independent limits:
      * ``first_timeout`` — max seconds to wait for the first chunk
      * ``chunk_timeout`` — max seconds between consecutive chunks
      * ``total_timeout`` — max seconds overall (0 disables this check)
    """

    def __init__(self, chunk_timeout: int = 120, first_timeout: int = 30, total_timeout: int = 600):
        self.chunk_timeout = chunk_timeout
        self.first_timeout = first_timeout
        self.total_timeout = total_timeout
        # FIX: use time.monotonic() instead of asyncio.get_event_loop().time().
        # It is the same monotonic clock the default loop uses, but is safe to
        # call without a running loop (get_event_loop() outside a running loop
        # is deprecated since Python 3.10).
        self.start_time = time.monotonic()
        # Timestamp of the most recent chunk.
        self.last_chunk_time = self.start_time
        # Becomes True once the first chunk has arrived.
        self.first_received = False

    def check_timeout(self) -> Tuple[bool, str]:
        """Return ``(timed_out, reason)``; reason is "" when no limit is hit."""
        now = time.monotonic()

        if not self.first_received and now - self.start_time > self.first_timeout:
            return True, f"้ฆๆฌกๅๅบ่ถๆถ({self.first_timeout}็ง)"

        # total_timeout == 0 means "no overall limit".
        if self.total_timeout > 0 and now - self.start_time > self.total_timeout:
            return True, f"ๆป่ถๆถ({self.total_timeout}็ง)"

        if self.first_received and now - self.last_chunk_time > self.chunk_timeout:
            return True, f"ๆฐๆฎๅ่ถๆถ({self.chunk_timeout}็ง)"

        return False, ""

    def mark_received(self):
        """Record that a chunk arrived (also satisfies the first-chunk check)."""
        self.last_chunk_time = time.monotonic()
        self.first_received = True

    def duration(self) -> float:
        """Seconds elapsed since construction."""
        return time.monotonic() - self.start_time
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class GrokResponseProcessor:
|
| 60 |
+
"""Grokๅๅบๅค็ๅจ"""
|
| 61 |
+
|
| 62 |
+
@staticmethod
|
| 63 |
+
async def process_normal(response, auth_token: str, model: str = None) -> OpenAIChatCompletionResponse:
|
| 64 |
+
"""ๅค็้ๆตๅผๅๅบ"""
|
| 65 |
+
response_closed = False
|
| 66 |
+
try:
|
| 67 |
+
async for chunk in response.aiter_lines():
|
| 68 |
+
if not chunk:
|
| 69 |
+
continue
|
| 70 |
+
|
| 71 |
+
data = orjson.loads(chunk)
|
| 72 |
+
|
| 73 |
+
# ้่ฏฏๆฃๆฅ
|
| 74 |
+
if error := data.get("error"):
|
| 75 |
+
raise GrokApiException(
|
| 76 |
+
f"API้่ฏฏ: {error.get('message', 'ๆช็ฅ้่ฏฏ')}",
|
| 77 |
+
"API_ERROR",
|
| 78 |
+
{"code": error.get("code")}
|
| 79 |
+
)
|
| 80 |
+
|
| 81 |
+
grok_resp = data.get("result", {}).get("response", {})
|
| 82 |
+
|
| 83 |
+
# ่ง้ขๅๅบ
|
| 84 |
+
if video_resp := grok_resp.get("streamingVideoGenerationResponse"):
|
| 85 |
+
if video_url := video_resp.get("videoUrl"):
|
| 86 |
+
content = await GrokResponseProcessor._build_video_content(video_url, auth_token)
|
| 87 |
+
result = GrokResponseProcessor._build_response(content, model or "grok-imagine-0.9")
|
| 88 |
+
response_closed = True
|
| 89 |
+
response.close()
|
| 90 |
+
return result
|
| 91 |
+
|
| 92 |
+
# ๆจกๅๅๅบ
|
| 93 |
+
model_response = grok_resp.get("modelResponse")
|
| 94 |
+
if not model_response:
|
| 95 |
+
continue
|
| 96 |
+
|
| 97 |
+
if error_msg := model_response.get("error"):
|
| 98 |
+
raise GrokApiException(f"ๆจกๅ้่ฏฏ: {error_msg}", "MODEL_ERROR")
|
| 99 |
+
|
| 100 |
+
# ๆๅปบๅ
ๅฎน
|
| 101 |
+
content = model_response.get("message", "")
|
| 102 |
+
model_name = model_response.get("model")
|
| 103 |
+
|
| 104 |
+
# ๅค็ๅพ็
|
| 105 |
+
if images := model_response.get("generatedImageUrls"):
|
| 106 |
+
content = await GrokResponseProcessor._append_images(content, images, auth_token)
|
| 107 |
+
|
| 108 |
+
result = GrokResponseProcessor._build_response(content, model_name)
|
| 109 |
+
response_closed = True
|
| 110 |
+
response.close()
|
| 111 |
+
return result
|
| 112 |
+
|
| 113 |
+
raise GrokApiException("ๆ ๅๅบๆฐๆฎ", "NO_RESPONSE")
|
| 114 |
+
|
| 115 |
+
except orjson.JSONDecodeError as e:
|
| 116 |
+
logger.error(f"[Processor] JSON่งฃๆๅคฑ่ดฅ: {e}")
|
| 117 |
+
raise GrokApiException(f"JSON่งฃๆๅคฑ่ดฅ: {e}", "JSON_ERROR") from e
|
| 118 |
+
except Exception as e:
|
| 119 |
+
logger.error(f"[Processor] ๅค็้่ฏฏ: {type(e).__name__}: {e}")
|
| 120 |
+
raise GrokApiException(f"ๅๅบๅค็้่ฏฏ: {e}", "PROCESS_ERROR") from e
|
| 121 |
+
finally:
|
| 122 |
+
if not response_closed and hasattr(response, 'close'):
|
| 123 |
+
try:
|
| 124 |
+
response.close()
|
| 125 |
+
except Exception as e:
|
| 126 |
+
logger.warning(f"[Processor] ๅ
ณ้ญๅๅบๅคฑ่ดฅ: {e}")
|
| 127 |
+
|
| 128 |
+
@staticmethod
|
| 129 |
+
async def process_stream(response, auth_token: str, session: Any = None) -> AsyncGenerator[str, None]:
|
| 130 |
+
"""ๅค็ๆตๅผๅๅบ"""
|
| 131 |
+
# ็ถๆๅ้
|
| 132 |
+
is_image = False
|
| 133 |
+
is_thinking = False
|
| 134 |
+
thinking_finished = False
|
| 135 |
+
model = None
|
| 136 |
+
filtered_tags = setting.grok_config.get("filtered_tags", "").split(",")
|
| 137 |
+
video_progress_started = False
|
| 138 |
+
last_video_progress = -1
|
| 139 |
+
response_closed = False
|
| 140 |
+
show_thinking = setting.grok_config.get("show_thinking", True)
|
| 141 |
+
|
| 142 |
+
# ่ถ
ๆถ็ฎก็
|
| 143 |
+
timeout_mgr = StreamTimeoutManager(
|
| 144 |
+
chunk_timeout=setting.grok_config.get("stream_chunk_timeout", 120),
|
| 145 |
+
first_timeout=setting.grok_config.get("stream_first_response_timeout", 30),
|
| 146 |
+
total_timeout=setting.grok_config.get("stream_total_timeout", 600)
|
| 147 |
+
)
|
| 148 |
+
|
| 149 |
+
def make_chunk(content: str, finish: str = None):
|
| 150 |
+
"""็ๆๅๅบๅ"""
|
| 151 |
+
chunk_data = OpenAIChatCompletionChunkResponse(
|
| 152 |
+
id=f"chatcmpl-{uuid.uuid4()}",
|
| 153 |
+
created=int(time.time()),
|
| 154 |
+
model=model or "grok-4-mini-thinking-tahoe",
|
| 155 |
+
choices=[OpenAIChatCompletionChunkChoice(
|
| 156 |
+
index=0,
|
| 157 |
+
delta=OpenAIChatCompletionChunkMessage(
|
| 158 |
+
role="assistant",
|
| 159 |
+
content=content
|
| 160 |
+
) if content else {},
|
| 161 |
+
finish_reason=finish
|
| 162 |
+
)]
|
| 163 |
+
)
|
| 164 |
+
return f"data: {chunk_data.model_dump_json()}\n\n"
|
| 165 |
+
|
| 166 |
+
try:
|
| 167 |
+
async for chunk in response.aiter_lines():
|
| 168 |
+
# ่ถ
ๆถๆฃๆฅ
|
| 169 |
+
is_timeout, timeout_msg = timeout_mgr.check_timeout()
|
| 170 |
+
if is_timeout:
|
| 171 |
+
logger.warning(f"[Processor] {timeout_msg}")
|
| 172 |
+
yield make_chunk("", "stop")
|
| 173 |
+
yield "data: [DONE]\n\n"
|
| 174 |
+
return
|
| 175 |
+
|
| 176 |
+
logger.debug(f"[Processor] ๆถๅฐๆฐๆฎๅ: {len(chunk)} bytes")
|
| 177 |
+
if not chunk:
|
| 178 |
+
continue
|
| 179 |
+
|
| 180 |
+
try:
|
| 181 |
+
data = orjson.loads(chunk)
|
| 182 |
+
|
| 183 |
+
# ้่ฏฏๆฃๆฅ
|
| 184 |
+
if error := data.get("error"):
|
| 185 |
+
error_msg = error.get('message', 'ๆช็ฅ้่ฏฏ')
|
| 186 |
+
logger.error(f"[Processor] API้่ฏฏ: {error_msg}")
|
| 187 |
+
yield make_chunk(f"Error: {error_msg}", "stop")
|
| 188 |
+
yield "data: [DONE]\n\n"
|
| 189 |
+
return
|
| 190 |
+
|
| 191 |
+
grok_resp = data.get("result", {}).get("response", {})
|
| 192 |
+
logger.debug(f"[Processor] ่งฃๆๅๅบ: {len(grok_resp)} bytes")
|
| 193 |
+
if not grok_resp:
|
| 194 |
+
continue
|
| 195 |
+
|
| 196 |
+
timeout_mgr.mark_received()
|
| 197 |
+
|
| 198 |
+
# ๆดๆฐๆจกๅ
|
| 199 |
+
if user_resp := grok_resp.get("userResponse"):
|
| 200 |
+
if m := user_resp.get("model"):
|
| 201 |
+
model = m
|
| 202 |
+
|
| 203 |
+
# ่ง้ขๅค็
|
| 204 |
+
if video_resp := grok_resp.get("streamingVideoGenerationResponse"):
|
| 205 |
+
progress = video_resp.get("progress", 0)
|
| 206 |
+
v_url = video_resp.get("videoUrl")
|
| 207 |
+
|
| 208 |
+
# ่ฟๅบฆๆดๆฐ
|
| 209 |
+
if progress > last_video_progress:
|
| 210 |
+
last_video_progress = progress
|
| 211 |
+
if show_thinking:
|
| 212 |
+
if not video_progress_started:
|
| 213 |
+
content = f"<think>่ง้ขๅทฒ็ๆ{progress}%\n"
|
| 214 |
+
video_progress_started = True
|
| 215 |
+
elif progress < 100:
|
| 216 |
+
content = f"่ง้ขๅทฒ็ๆ{progress}%\n"
|
| 217 |
+
else:
|
| 218 |
+
content = f"่ง้ขๅทฒ็ๆ{progress}%</think>\n"
|
| 219 |
+
yield make_chunk(content)
|
| 220 |
+
|
| 221 |
+
# ่ง้ขURL
|
| 222 |
+
if v_url:
|
| 223 |
+
logger.debug("[Processor] ่ง้ข็ๆๅฎๆ")
|
| 224 |
+
video_content = await GrokResponseProcessor._build_video_content(v_url, auth_token)
|
| 225 |
+
yield make_chunk(video_content)
|
| 226 |
+
|
| 227 |
+
continue
|
| 228 |
+
|
| 229 |
+
# ๅพ็ๆจกๅผ
|
| 230 |
+
if grok_resp.get("imageAttachmentInfo"):
|
| 231 |
+
is_image = True
|
| 232 |
+
|
| 233 |
+
token = grok_resp.get("token", "")
|
| 234 |
+
|
| 235 |
+
# ๅพ็ๅค็
|
| 236 |
+
if is_image:
|
| 237 |
+
if model_resp := grok_resp.get("modelResponse"):
|
| 238 |
+
image_mode = setting.global_config.get("image_mode", "url")
|
| 239 |
+
content = ""
|
| 240 |
+
|
| 241 |
+
for img in model_resp.get("generatedImageUrls", []):
|
| 242 |
+
try:
|
| 243 |
+
if image_mode == "base64":
|
| 244 |
+
# Base64ๆจกๅผ - ๅๅๅ้
|
| 245 |
+
base64_str = await image_cache_service.download_base64(f"/{img}", auth_token)
|
| 246 |
+
if base64_str:
|
| 247 |
+
# ๅๅๅ้ๅคงๆฐๆฎ
|
| 248 |
+
if not base64_str.startswith("data:"):
|
| 249 |
+
parts = base64_str.split(",", 1)
|
| 250 |
+
if len(parts) == 2:
|
| 251 |
+
yield make_chunk(f"
|
| 252 |
+
# 8KBๅๅ
|
| 253 |
+
for i in range(0, len(parts[1]), 8192):
|
| 254 |
+
yield make_chunk(parts[1][i:i+8192])
|
| 255 |
+
yield make_chunk(")\n")
|
| 256 |
+
else:
|
| 257 |
+
yield make_chunk(f"\n")
|
| 258 |
+
else:
|
| 259 |
+
yield make_chunk(f"\n")
|
| 260 |
+
else:
|
| 261 |
+
yield make_chunk(f"\n")
|
| 262 |
+
else:
|
| 263 |
+
# URLๆจกๅผ
|
| 264 |
+
await image_cache_service.download_image(f"/{img}", auth_token)
|
| 265 |
+
img_path = img.replace('/', '-')
|
| 266 |
+
base_url = setting.global_config.get("base_url", "")
|
| 267 |
+
img_url = f"{base_url}/images/{img_path}" if base_url else f"/images/{img_path}"
|
| 268 |
+
content += f"\n"
|
| 269 |
+
except Exception as e:
|
| 270 |
+
logger.warning(f"[Processor] ๅค็ๅพ็ๅคฑ่ดฅ: {e}")
|
| 271 |
+
content += f"\n"
|
| 272 |
+
|
| 273 |
+
yield make_chunk(content.strip(), "stop")
|
| 274 |
+
return
|
| 275 |
+
elif token:
|
| 276 |
+
yield make_chunk(token)
|
| 277 |
+
|
| 278 |
+
# ๅฏน่ฏๅค็
|
| 279 |
+
else:
|
| 280 |
+
if isinstance(token, list):
|
| 281 |
+
continue
|
| 282 |
+
|
| 283 |
+
if any(tag in token for tag in filtered_tags if token):
|
| 284 |
+
continue
|
| 285 |
+
|
| 286 |
+
current_is_thinking = grok_resp.get("isThinking", False)
|
| 287 |
+
message_tag = grok_resp.get("messageTag")
|
| 288 |
+
|
| 289 |
+
if thinking_finished and current_is_thinking:
|
| 290 |
+
continue
|
| 291 |
+
|
| 292 |
+
# ๆ็ดข็ปๆๅค็
|
| 293 |
+
if grok_resp.get("toolUsageCardId"):
|
| 294 |
+
if web_search := grok_resp.get("webSearchResults"):
|
| 295 |
+
if current_is_thinking:
|
| 296 |
+
if show_thinking:
|
| 297 |
+
for result in web_search.get("results", []):
|
| 298 |
+
title = result.get("title", "")
|
| 299 |
+
url = result.get("url", "")
|
| 300 |
+
preview = result.get("preview", "")
|
| 301 |
+
preview_clean = preview.replace("\n", "") if isinstance(preview, str) else ""
|
| 302 |
+
token += f'\n- [{title}]({url} "{preview_clean}")'
|
| 303 |
+
token += "\n"
|
| 304 |
+
else:
|
| 305 |
+
continue
|
| 306 |
+
else:
|
| 307 |
+
continue
|
| 308 |
+
else:
|
| 309 |
+
continue
|
| 310 |
+
|
| 311 |
+
if token:
|
| 312 |
+
content = token
|
| 313 |
+
|
| 314 |
+
if message_tag == "header":
|
| 315 |
+
content = f"\n\n{token}\n\n"
|
| 316 |
+
|
| 317 |
+
# Thinking็ถๆๅๆข
|
| 318 |
+
should_skip = False
|
| 319 |
+
if not is_thinking and current_is_thinking:
|
| 320 |
+
if show_thinking:
|
| 321 |
+
content = f"<think>\n{content}"
|
| 322 |
+
else:
|
| 323 |
+
should_skip = True
|
| 324 |
+
elif is_thinking and not current_is_thinking:
|
| 325 |
+
if show_thinking:
|
| 326 |
+
content = f"\n</think>\n{content}"
|
| 327 |
+
thinking_finished = True
|
| 328 |
+
elif current_is_thinking:
|
| 329 |
+
if not show_thinking:
|
| 330 |
+
should_skip = True
|
| 331 |
+
|
| 332 |
+
if not should_skip:
|
| 333 |
+
yield make_chunk(content)
|
| 334 |
+
|
| 335 |
+
is_thinking = current_is_thinking
|
| 336 |
+
|
| 337 |
+
except (orjson.JSONDecodeError, UnicodeDecodeError) as e:
|
| 338 |
+
logger.warning(f"[Processor] ่งฃๆๅคฑ่ดฅ: {e}")
|
| 339 |
+
continue
|
| 340 |
+
except Exception as e:
|
| 341 |
+
logger.warning(f"[Processor] ๅค็ๅบ้: {e}")
|
| 342 |
+
continue
|
| 343 |
+
|
| 344 |
+
yield make_chunk("", "stop")
|
| 345 |
+
yield "data: [DONE]\n\n"
|
| 346 |
+
logger.info(f"[Processor] ๆตๅผๅฎๆ๏ผ่ๆถ: {timeout_mgr.duration():.2f}็ง")
|
| 347 |
+
|
| 348 |
+
except Exception as e:
|
| 349 |
+
logger.error(f"[Processor] ไธฅ้้่ฏฏ: {e}")
|
| 350 |
+
yield make_chunk(f"ๅค็้่ฏฏ: {e}", "error")
|
| 351 |
+
yield "data: [DONE]\n\n"
|
| 352 |
+
finally:
|
| 353 |
+
if not response_closed and hasattr(response, 'close'):
|
| 354 |
+
try:
|
| 355 |
+
response.close()
|
| 356 |
+
logger.debug("[Processor] ๅๅบๅทฒๅ
ณ้ญ")
|
| 357 |
+
except Exception as e:
|
| 358 |
+
logger.warning(f"[Processor] ๅ
ณ้ญๅคฑ่ดฅ: {e}")
|
| 359 |
+
|
| 360 |
+
if session:
|
| 361 |
+
try:
|
| 362 |
+
await session.close()
|
| 363 |
+
logger.debug("[Processor] ไผ่ฏๅทฒๅ
ณ้ญ")
|
| 364 |
+
except Exception as e:
|
| 365 |
+
logger.warning(f"[Processor] ๅ
ณ้ญไผ่ฏๅคฑ่ดฅ: {e}")
|
| 366 |
+
|
| 367 |
+
@staticmethod
|
| 368 |
+
async def _build_video_content(video_url: str, auth_token: str) -> str:
|
| 369 |
+
"""ๆๅปบ่ง้ขๅ
ๅฎน"""
|
| 370 |
+
logger.debug(f"[Processor] ๆฃๆตๅฐ่ง้ข: {video_url}")
|
| 371 |
+
full_url = f"https://assets.grok.com/{video_url}"
|
| 372 |
+
|
| 373 |
+
try:
|
| 374 |
+
cache_path = await video_cache_service.download_video(f"/{video_url}", auth_token)
|
| 375 |
+
if cache_path:
|
| 376 |
+
video_path = video_url.replace('/', '-')
|
| 377 |
+
base_url = setting.global_config.get("base_url", "")
|
| 378 |
+
local_url = f"{base_url}/images/{video_path}" if base_url else f"/images/{video_path}"
|
| 379 |
+
return f'<video src="{local_url}" controls="controls" width="500" height="300"></video>\n'
|
| 380 |
+
except Exception as e:
|
| 381 |
+
logger.warning(f"[Processor] ็ผๅญ่ง้ขๅคฑ่ดฅ: {e}")
|
| 382 |
+
|
| 383 |
+
return f'<video src="{full_url}" controls="controls" width="500" height="300"></video>\n'
|
| 384 |
+
|
| 385 |
+
@staticmethod
|
| 386 |
+
async def _append_images(content: str, images: list, auth_token: str) -> str:
|
| 387 |
+
"""่ฟฝๅ ๅพ็ๅฐๅ
ๅฎน"""
|
| 388 |
+
image_mode = setting.global_config.get("image_mode", "url")
|
| 389 |
+
|
| 390 |
+
for img in images:
|
| 391 |
+
try:
|
| 392 |
+
if image_mode == "base64":
|
| 393 |
+
base64_str = await image_cache_service.download_base64(f"/{img}", auth_token)
|
| 394 |
+
if base64_str:
|
| 395 |
+
content += f"\n"
|
| 396 |
+
else:
|
| 397 |
+
content += f"\n"
|
| 398 |
+
else:
|
| 399 |
+
cache_path = await image_cache_service.download_image(f"/{img}", auth_token)
|
| 400 |
+
if cache_path:
|
| 401 |
+
img_path = img.replace('/', '-')
|
| 402 |
+
base_url = setting.global_config.get("base_url", "")
|
| 403 |
+
img_url = f"{base_url}/images/{img_path}" if base_url else f"/images/{img_path}"
|
| 404 |
+
content += f"\n"
|
| 405 |
+
else:
|
| 406 |
+
content += f"\n"
|
| 407 |
+
except Exception as e:
|
| 408 |
+
logger.warning(f"[Processor] ๅค็ๅพ็ๅคฑ่ดฅ: {e}")
|
| 409 |
+
content += f"\n"
|
| 410 |
+
|
| 411 |
+
return content
|
| 412 |
+
|
| 413 |
+
@staticmethod
|
| 414 |
+
def _build_response(content: str, model: str) -> OpenAIChatCompletionResponse:
|
| 415 |
+
"""ๆๅปบๅๅบๅฏน่ฑก"""
|
| 416 |
+
return OpenAIChatCompletionResponse(
|
| 417 |
+
id=f"chatcmpl-{uuid.uuid4()}",
|
| 418 |
+
object="chat.completion",
|
| 419 |
+
created=int(time.time()),
|
| 420 |
+
model=model,
|
| 421 |
+
choices=[OpenAIChatCompletionChoice(
|
| 422 |
+
index=0,
|
| 423 |
+
message=OpenAIChatCompletionMessage(
|
| 424 |
+
role="assistant",
|
| 425 |
+
content=content
|
| 426 |
+
),
|
| 427 |
+
finish_reason="stop"
|
| 428 |
+
)],
|
| 429 |
+
usage=None
|
| 430 |
+
)
|
app/services/grok/statsig.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Grok ่ฏทๆฑๅคด็ฎก็ - ็ๆๅจๆ่ฏทๆฑๅคดๅStatsig ID"""
|
| 2 |
+
|
| 3 |
+
import base64
|
| 4 |
+
import random
|
| 5 |
+
import string
|
| 6 |
+
import uuid
|
| 7 |
+
from typing import Dict
|
| 8 |
+
|
| 9 |
+
from app.core.logger import logger
|
| 10 |
+
from app.core.config import setting
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
# ๅบ็ก่ฏทๆฑๅคด
|
| 14 |
+
BASE_HEADERS = {
|
| 15 |
+
"Accept": "*/*",
|
| 16 |
+
"Accept-Language": "zh-CN,zh;q=0.9",
|
| 17 |
+
"Accept-Encoding": "gzip, deflate, br, zstd",
|
| 18 |
+
"Connection": "keep-alive",
|
| 19 |
+
"Origin": "https://grok.com",
|
| 20 |
+
"Priority": "u=1, i",
|
| 21 |
+
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36",
|
| 22 |
+
"Sec-Ch-Ua": '"Not(A:Brand";v="99", "Google Chrome";v="133", "Chromium";v="133"',
|
| 23 |
+
"Sec-Ch-Ua-Mobile": "?0",
|
| 24 |
+
"Sec-Ch-Ua-Platform": '"macOS"',
|
| 25 |
+
"Sec-Fetch-Dest": "empty",
|
| 26 |
+
"Sec-Fetch-Mode": "cors",
|
| 27 |
+
"Sec-Fetch-Site": "same-origin",
|
| 28 |
+
"Baggage": "sentry-environment=production,sentry-public_key=b311e0f2690c81f25e2c4cf6d4f7ce1c",
|
| 29 |
+
}
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def _random_string(length: int, letters_only: bool = True) -> str:
|
| 33 |
+
"""็ๆ้ๆบๅญ็ฌฆไธฒ"""
|
| 34 |
+
chars = string.ascii_lowercase if letters_only else string.ascii_lowercase + string.digits
|
| 35 |
+
return ''.join(random.choices(chars, k=length))
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def _generate_statsig_id() -> str:
    """Generate a base64-encoded x-statsig-id value.

    Randomly picks one of two fake JS TypeError messages:
    1. e:TypeError: Cannot read properties of null (reading 'children['xxxxx']')
    2. e:TypeError: Cannot read properties of undefined (reading 'xxxxxxxxxx')
    """
    if random.choice([True, False]):
        suffix = _random_string(5, letters_only=False)
        payload = f"e:TypeError: Cannot read properties of null (reading 'children['{suffix}']')"
    else:
        suffix = _random_string(10)
        payload = f"e:TypeError: Cannot read properties of undefined (reading '{suffix}')"

    return base64.b64encode(payload.encode()).decode()
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def get_dynamic_headers(pathname: str = "/rest/app-chat/conversations/new") -> Dict[str, str]:
    """Build the full header set for a Grok request.

    Args:
        pathname: Request path; upload endpoints get a text/plain Content-Type.

    Returns:
        A complete header dict including a fresh x-statsig-id and request id.

    Raises:
        ValueError: when dynamic statsig is disabled and no fixed
            ``x_statsig_id`` is configured.
    """
    # Resolve the statsig id: generated per request, or a configured constant.
    if setting.grok_config.get("dynamic_statsig", False):
        statsig_id = _generate_statsig_id()
        logger.debug(f"[Statsig] ๅŠจๆ€็”Ÿๆˆ: {statsig_id}")
    else:
        statsig_id = setting.grok_config.get("x_statsig_id")
        if not statsig_id:
            raise ValueError("้…็ฝฎๆ–‡ไปถไธญๆœช่ฎพ็ฝฎ x_statsig_id")
        logger.debug(f"[Statsig] ไฝฟ็”จๅ›บๅฎšๅ€ผ: {statsig_id}")

    # Layer per-request values on a copy of the shared base headers.
    is_upload = "upload-file" in pathname
    headers = dict(BASE_HEADERS)
    headers.update({
        "x-statsig-id": statsig_id,
        "x-xai-request-id": str(uuid.uuid4()),
        "Content-Type": "text/plain;charset=UTF-8" if is_upload else "application/json",
    })
    return headers
|
app/services/grok/token.py
ADDED
|
@@ -0,0 +1,619 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Grok Token ็ฎก็ๅจ - ๅไพๆจกๅผ็Token่ด่ฝฝๅ่กกๅ็ถๆ็ฎก็"""
|
| 2 |
+
|
| 3 |
+
import orjson
|
| 4 |
+
import time
|
| 5 |
+
import asyncio
|
| 6 |
+
import aiofiles
|
| 7 |
+
import portalocker
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
from curl_cffi.requests import AsyncSession
|
| 10 |
+
from typing import Dict, Any, Optional, Tuple
|
| 11 |
+
|
| 12 |
+
from app.models.grok_models import TokenType, Models
|
| 13 |
+
from app.core.exception import GrokApiException
|
| 14 |
+
from app.core.logger import logger
|
| 15 |
+
from app.core.config import setting
|
| 16 |
+
from app.services.grok.statsig import get_dynamic_headers
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
# ๅธธ้
|
| 20 |
+
RATE_LIMIT_API = "https://grok.com/rest/rate-limits"
|
| 21 |
+
TIMEOUT = 30
|
| 22 |
+
BROWSER = "chrome133a"
|
| 23 |
+
MAX_FAILURES = 3
|
| 24 |
+
TOKEN_INVALID = 401
|
| 25 |
+
STATSIG_INVALID = 403
|
| 26 |
+
|
| 27 |
+
# ๅทๅดๅธธ้
|
| 28 |
+
COOLDOWN_REQUESTS = 5 # ๆฎ้ๅคฑ่ดฅๅทๅด่ฏทๆฑๆฐ
|
| 29 |
+
COOLDOWN_429_WITH_QUOTA = 3600 # 429+ๆ้ขๅบฆๅทๅด1ๅฐๆถ๏ผ็ง๏ผ
|
| 30 |
+
COOLDOWN_429_NO_QUOTA = 36000 # 429+ๆ ้ขๅบฆๅทๅด10ๅฐๆถ๏ผ็ง๏ผ
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class GrokTokenManager:
    """Token manager (singleton).

    Load-balances Grok SSO tokens, tracks per-token quota, failure and
    cooldown state, and persists the store to data/token.json with file
    locks for multi-process safety.
    """

    # Singleton instance; _lock is declared for coordination but note that
    # __new__ itself does not acquire it.
    _instance: Optional['GrokTokenManager'] = None
    _lock = asyncio.Lock()

    def __new__(cls) -> 'GrokTokenManager':
        # Classic module-wide singleton: every call returns one instance.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        # __init__ runs on every GrokTokenManager() call; guard so the
        # singleton initializes only once.
        if hasattr(self, '_initialized'):
            return

        # Token store lives at <repo root>/data/token.json.
        self.token_file = Path(__file__).parents[3] / "data" / "token.json"
        self._file_lock = asyncio.Lock()
        self.token_file.parent.mkdir(parents=True, exist_ok=True)
        self._storage = None
        self.token_data = None  # lazily loaded via _load_data()

        # Batched-save state
        self._save_pending = False  # True when in-memory data needs saving
        self._save_task = None  # background save task handle
        self._shutdown = False  # shutdown flag

        # Cooldown state
        self._cooldown_counts: Dict[str, int] = {}  # token -> remaining cooldown requests
        self._request_counter = 0  # global request counter

        # Refresh state
        self._refresh_lock = False  # refresh lock flag
        self._refresh_progress: Dict[str, Any] = {"running": False, "current": 0, "total": 0, "success": 0, "failed": 0}

        self._initialized = True
        logger.debug(f"[Token] ๅˆๅง‹ๅŒ–ๅฎŒๆˆ: {self.token_file}")
|
| 69 |
+
|
| 70 |
+
    def set_storage(self, storage) -> None:
        """Set an external storage backend; when set, _save_data delegates to it."""
        self._storage = storage
|
| 73 |
+
|
| 74 |
+
    async def _load_data(self) -> None:
        """Load token data asynchronously (multi-process safe).

        Reads token.json under a shared file lock in a worker thread;
        falls back to an empty store on any error or missing file.
        """
        default = {TokenType.NORMAL.value: {}, TokenType.SUPER.value: {}}

        def load_sync():
            # Shared (read) lock so concurrent processes can read safely.
            with open(self.token_file, "r", encoding="utf-8") as f:
                portalocker.lock(f, portalocker.LOCK_SH)
                try:
                    return orjson.loads(f.read())
                finally:
                    portalocker.unlock(f)

        try:
            if self.token_file.exists():
                # Read the file off the event loop, serialized per process.
                async with self._file_lock:
                    self.token_data = await asyncio.to_thread(load_sync)
            else:
                self.token_data = default
                logger.debug("[Token] ๅˆ›ๅปบๆ–ฐๆ•ฐๆฎๆ–‡ไปถ")
        except Exception as e:
            logger.error(f"[Token] ๅŠ ่ฝฝๅคฑ่ดฅ: {e}")
            self.token_data = default
|
| 97 |
+
|
| 98 |
+
    async def _save_data(self) -> None:
        """Persist token data (multi-process safe).

        Writes token.json under an exclusive file lock in a worker thread,
        or delegates to the configured storage backend.

        Raises:
            GrokApiException: when persisting fails.
        """
        def save_sync(data):
            # Exclusive (write) lock so concurrent processes don't interleave.
            with open(self.token_file, "w", encoding="utf-8") as f:
                portalocker.lock(f, portalocker.LOCK_EX)
                try:
                    content = orjson.dumps(data, option=orjson.OPT_INDENT_2).decode()
                    f.write(content)
                    f.flush()
                finally:
                    portalocker.unlock(f)

        try:
            if not self._storage:
                async with self._file_lock:
                    await asyncio.to_thread(save_sync, self.token_data)
            else:
                await self._storage.save_tokens(self.token_data)
        except Exception as e:
            logger.error(f"[Token] ไฟๅญ˜ๅคฑ่ดฅ: {e}")
            raise GrokApiException(f"ไฟๅญ˜ๅคฑ่ดฅ: {e}", "TOKEN_SAVE_ERROR")
|
| 119 |
+
|
| 120 |
+
    def _mark_dirty(self) -> None:
        """Flag the in-memory data as unsaved; the batch-save worker flushes it."""
        self._save_pending = True
|
| 123 |
+
|
| 124 |
+
    async def _batch_save_worker(self) -> None:
        """Background task: periodically flush dirty token data to disk."""
        from app.core.config import setting

        interval = setting.global_config.get("batch_save_interval", 1.0)
        logger.info(f"[Token] ๅญ˜ๅ‚จไปปๅŠกๅทฒๅฏๅŠจ๏ผŒ้—ด้š”: {interval}s")

        while not self._shutdown:
            await asyncio.sleep(interval)

            # Only save when something changed and we are not shutting down.
            if self._save_pending and not self._shutdown:
                try:
                    await self._save_data()
                    self._save_pending = False
                    logger.debug("[Token] ๅญ˜ๅ‚จๅฎŒๆˆ")
                except Exception as e:
                    logger.error(f"[Token] ๅญ˜ๅ‚จๅคฑ่ดฅ: {e}")
|
| 141 |
+
|
| 142 |
+
async def start_batch_save(self) -> None:
|
| 143 |
+
"""ๅฏๅจๆน้ไฟๅญไปปๅก"""
|
| 144 |
+
if self._save_task is None:
|
| 145 |
+
self._save_task = asyncio.create_task(self._batch_save_worker())
|
| 146 |
+
logger.info("[Token] ๅญๅจไปปๅกๅทฒๅๅปบ")
|
| 147 |
+
|
| 148 |
+
    async def shutdown(self) -> None:
        """Stop the batch-save task and flush any pending data."""
        self._shutdown = True

        # Cancel the worker and wait for it to unwind.
        if self._save_task:
            self._save_task.cancel()
            try:
                await self._save_task
            except asyncio.CancelledError:
                pass

        # Final flush of anything the worker did not get to.
        if self._save_pending:
            await self._save_data()
            logger.info("[Token] ๅ…ณ้—ญๆ—ถๅˆทๆ–ฐๅฎŒๆˆ")
|
| 163 |
+
|
| 164 |
+
@staticmethod
|
| 165 |
+
def _extract_sso(auth_token: str) -> Optional[str]:
|
| 166 |
+
"""ๆๅSSOๅผ"""
|
| 167 |
+
if "sso=" in auth_token:
|
| 168 |
+
return auth_token.split("sso=")[1].split(";")[0]
|
| 169 |
+
logger.warning("[Token] ๆ ๆณๆๅSSOๅผ")
|
| 170 |
+
return None
|
| 171 |
+
|
| 172 |
+
def _find_token(self, sso: str) -> Tuple[Optional[str], Optional[Dict]]:
|
| 173 |
+
"""ๆฅๆพToken"""
|
| 174 |
+
for token_type in [TokenType.NORMAL.value, TokenType.SUPER.value]:
|
| 175 |
+
if sso in self.token_data[token_type]:
|
| 176 |
+
return token_type, self.token_data[token_type][sso]
|
| 177 |
+
return None, None
|
| 178 |
+
|
| 179 |
+
async def add_token(self, tokens: list[str], token_type: TokenType) -> None:
|
| 180 |
+
"""ๆทปๅ Token"""
|
| 181 |
+
if not tokens:
|
| 182 |
+
return
|
| 183 |
+
|
| 184 |
+
count = 0
|
| 185 |
+
for token in tokens:
|
| 186 |
+
if not token or not token.strip():
|
| 187 |
+
continue
|
| 188 |
+
|
| 189 |
+
self.token_data[token_type.value][token] = {
|
| 190 |
+
"createdTime": int(time.time() * 1000),
|
| 191 |
+
"remainingQueries": -1,
|
| 192 |
+
"heavyremainingQueries": -1,
|
| 193 |
+
"status": "active",
|
| 194 |
+
"failedCount": 0,
|
| 195 |
+
"lastFailureTime": None,
|
| 196 |
+
"lastFailureReason": None,
|
| 197 |
+
"tags": [],
|
| 198 |
+
"note": ""
|
| 199 |
+
}
|
| 200 |
+
count += 1
|
| 201 |
+
|
| 202 |
+
self._mark_dirty() # ๆน้ไฟๅญ
|
| 203 |
+
logger.info(f"[Token] ๆทปๅ {count} ไธช {token_type.value} Token")
|
| 204 |
+
|
| 205 |
+
async def delete_token(self, tokens: list[str], token_type: TokenType) -> None:
|
| 206 |
+
"""ๅ ้คToken"""
|
| 207 |
+
if not tokens:
|
| 208 |
+
return
|
| 209 |
+
|
| 210 |
+
count = 0
|
| 211 |
+
for token in tokens:
|
| 212 |
+
if token in self.token_data[token_type.value]:
|
| 213 |
+
del self.token_data[token_type.value][token]
|
| 214 |
+
count += 1
|
| 215 |
+
|
| 216 |
+
self._mark_dirty() # ๆน้ไฟๅญ
|
| 217 |
+
logger.info(f"[Token] ๅ ้ค {count} ไธช {token_type.value} Token")
|
| 218 |
+
|
| 219 |
+
async def update_token_tags(self, token: str, token_type: TokenType, tags: list[str]) -> None:
|
| 220 |
+
"""ๆดๆฐTokenๆ ็ญพ"""
|
| 221 |
+
if token not in self.token_data[token_type.value]:
|
| 222 |
+
raise GrokApiException("Tokenไธๅญๅจ", "TOKEN_NOT_FOUND", {"token": token[:10]})
|
| 223 |
+
|
| 224 |
+
cleaned = [t.strip() for t in tags if t and t.strip()]
|
| 225 |
+
self.token_data[token_type.value][token]["tags"] = cleaned
|
| 226 |
+
self._mark_dirty() # ๆน้ไฟๅญ
|
| 227 |
+
logger.info(f"[Token] ๆดๆฐๆ ็ญพ: {token[:10]}... -> {cleaned}")
|
| 228 |
+
|
| 229 |
+
async def update_token_note(self, token: str, token_type: TokenType, note: str) -> None:
|
| 230 |
+
"""ๆดๆฐTokenๅคๆณจ"""
|
| 231 |
+
if token not in self.token_data[token_type.value]:
|
| 232 |
+
raise GrokApiException("Tokenไธๅญๅจ", "TOKEN_NOT_FOUND", {"token": token[:10]})
|
| 233 |
+
|
| 234 |
+
self.token_data[token_type.value][token]["note"] = note.strip()
|
| 235 |
+
self._mark_dirty() # ๆน้ไฟๅญ
|
| 236 |
+
logger.info(f"[Token] ๆดๆฐๅคๆณจ: {token[:10]}...")
|
| 237 |
+
|
| 238 |
+
    def get_tokens(self) -> Dict[str, Any]:
        """Return a snapshot of all tokens.

        NOTE(review): this is a shallow copy — nested per-token dicts are
        still shared with the live store; treat the result as read-only.
        """
        return self.token_data.copy()
|
| 241 |
+
|
| 242 |
+
    async def _reload_if_needed(self) -> None:
        """Re-read token.json in multi-process file mode.

        Skipped entirely when an external storage backend is configured.
        """
        # Only file mode in a multi-process environment needs a reload.
        if self._storage:
            return

        def reload_sync():
            # Shared (read) lock mirrors _load_data's locking.
            with open(self.token_file, "r", encoding="utf-8") as f:
                portalocker.lock(f, portalocker.LOCK_SH)
                try:
                    return orjson.loads(f.read())
                finally:
                    portalocker.unlock(f)

        try:
            if self.token_file.exists():
                self.token_data = await asyncio.to_thread(reload_sync)
        except Exception as e:
            logger.warning(f"[Token] ้‡ๆ–ฐๅŠ ่ฝฝๅคฑ่ดฅ: {e}")
|
| 261 |
+
|
| 262 |
+
async def get_token(self, model: str) -> str:
|
| 263 |
+
"""่ทๅToken"""
|
| 264 |
+
jwt = await self.select_token(model)
|
| 265 |
+
return f"sso-rw={jwt};sso={jwt}"
|
| 266 |
+
|
| 267 |
+
    async def select_token(self, model: str) -> str:
        """Select the best token for *model* (multi-process aware, cooldown aware).

        Preference: unused tokens (remaining == -1) first, then the token
        with the most remaining quota. grok-4-heavy draws only from the
        SUPER pool; other models try NORMAL first, then fall back to SUPER.

        Raises:
            GrokApiException: when no usable token is available.
        """
        # Pick up changes written by sibling processes (file mode only).
        await self._reload_if_needed()

        # Tick down request-count cooldowns on every selection.
        self._request_counter += 1
        for token in list(self._cooldown_counts.keys()):
            self._cooldown_counts[token] -= 1
            if self._cooldown_counts[token] <= 0:
                del self._cooldown_counts[token]
                logger.debug(f"[Token] ๅ†ทๅด็ป“ๆŸ: {token[:10]}...")

        current_time = time.time() * 1000  # milliseconds, matches cooldownUntil

        def select_best(tokens: Dict[str, Any], field: str) -> Tuple[Optional[str], Optional[int]]:
            """Pick the best candidate from one pool by quota *field*."""
            unused, used = [], []

            for key, data in tokens.items():
                # Skip expired tokens
                if data.get("status") == "expired":
                    continue

                # Skip tokens with too many failures (any error status)
                if data.get("failedCount", 0) >= MAX_FAILURES:
                    continue

                # Skip tokens in request-count cooldown
                if key in self._cooldown_counts:
                    continue

                # Skip tokens in time-based cooldown (429)
                cooldown_until = data.get("cooldownUntil", 0)
                if cooldown_until and cooldown_until > current_time:
                    continue

                remaining = int(data.get(field, -1))
                if remaining == 0:
                    continue

                # -1 means "never measured" — preferred over any known quota.
                if remaining == -1:
                    unused.append(key)
                elif remaining > 0:
                    used.append((key, remaining))

            if unused:
                return unused[0], -1
            if used:
                used.sort(key=lambda x: x[1], reverse=True)
                return used[0][0], used[0][1]
            return None, None

        # Snapshot so concurrent mutation can't break iteration.
        snapshot = {
            TokenType.NORMAL.value: self.token_data[TokenType.NORMAL.value].copy(),
            TokenType.SUPER.value: self.token_data[TokenType.SUPER.value].copy()
        }

        # Selection strategy per model.
        if model == "grok-4-heavy":
            field = "heavyremainingQueries"
            token_key, remaining = select_best(snapshot[TokenType.SUPER.value], field)
        else:
            field = "remainingQueries"
            token_key, remaining = select_best(snapshot[TokenType.NORMAL.value], field)
            if token_key is None:
                token_key, remaining = select_best(snapshot[TokenType.SUPER.value], field)

        if token_key is None:
            raise GrokApiException(
                f"ๆฒกๆœ‰ๅฏ็”จToken: {model}",
                "NO_AVAILABLE_TOKEN",
                {
                    "model": model,
                    "normal": len(snapshot[TokenType.NORMAL.value]),
                    "super": len(snapshot[TokenType.SUPER.value]),
                    "cooldown_count": len(self._cooldown_counts)
                }
            )

        status = "ๆœชไฝฟ็”จ" if remaining == -1 else f"ๅ‰ฉไฝ™{remaining}ๆฌก"
        logger.debug(f"[Token] ๅˆ†้…Token: {model} ({status})")
        return token_key
|
| 351 |
+
|
| 352 |
+
    async def check_limits(self, auth_token: str, model: str) -> Optional[Dict[str, Any]]:
        """Query Grok's rate-limit endpoint and sync the token's quota.

        Retry structure: an inner loop retries 403s through the proxy pool
        (up to 5 times, rotating the proxy), and an outer loop retries
        configurable status codes (default 401/429, up to 3 times with a
        progressive delay). On HTTP 200 the remaining quota is written
        back via update_limits.

        Returns:
            The rate-limit payload dict on success, otherwise None.
        """
        try:
            rate_model = Models.to_rate_limit(model)
            payload = {"requestKind": "DEFAULT", "modelName": rate_model}

            cf = setting.grok_config.get("cf_clearance", "")
            headers = get_dynamic_headers("/rest/rate-limits")
            headers["Cookie"] = f"{auth_token};{cf}" if cf else auth_token

            # Outer retry: configurable status codes (401/429 etc.)
            retry_codes = setting.grok_config.get("retry_status_codes", [401, 429])
            MAX_OUTER_RETRY = 3

            for outer_retry in range(MAX_OUTER_RETRY + 1):  # +1 so we really retry 3 times
                # Inner retry: 403s via the proxy pool
                max_403_retries = 5
                retry_403_count = 0

                while retry_403_count <= max_403_retries:
                    # Resolve a proxy asynchronously (proxy pool supported)
                    from app.core.proxy_pool import proxy_pool

                    # On a 403 retry with the proxy pool enabled, force-rotate the proxy
                    if retry_403_count > 0 and proxy_pool._enabled:
                        logger.info(f"[Token] 403้‡่ฏ• {retry_403_count}/{max_403_retries}๏ผŒๅˆทๆ–ฐไปฃ็†...")
                        proxy = await proxy_pool.force_refresh()
                    else:
                        proxy = await setting.get_proxy_async("service")

                    proxies = {"http": proxy, "https": proxy} if proxy else None

                    async with AsyncSession() as session:
                        response = await session.post(
                            RATE_LIMIT_API,
                            headers=headers,
                            json=payload,
                            impersonate=BROWSER,
                            timeout=TIMEOUT,
                            proxies=proxies
                        )

                    # Inner 403 retry: only triggered when the proxy pool is enabled
                    if response.status_code == 403 and proxy_pool._enabled:
                        retry_403_count += 1

                        if retry_403_count <= max_403_retries:
                            logger.warning(f"[Token] ้‡ๅˆฐ403้”™่ฏฏ๏ผŒๆญฃๅœจ้‡่ฏ• ({retry_403_count}/{max_403_retries})...")
                            await asyncio.sleep(0.5)
                            continue

                        # Inner retries exhausted. NOTE(review): 403 is
                        # swallowed by record_failure (STATSIG_INVALID) and
                        # execution then falls through to the checks below.
                        logger.error(f"[Token] 403้”™่ฏฏ๏ผŒๅทฒ้‡่ฏ•{retry_403_count-1}ๆฌก๏ผŒๆ”พๅผƒ")
                        sso = self._extract_sso(auth_token)
                        if sso:
                            await self.record_failure(auth_token, 403, "ๆœๅŠกๅ™จ่ขซBlock")

                    # Configurable status-code errors - outer retry
                    if response.status_code in retry_codes:
                        if outer_retry < MAX_OUTER_RETRY:
                            delay = (outer_retry + 1) * 0.1  # progressive delay: 0.1s, 0.2s, 0.3s
                            logger.warning(f"[Token] ้‡ๅˆฐ{response.status_code}้”™่ฏฏ๏ผŒๅค–ๅฑ‚้‡่ฏ• ({outer_retry+1}/{MAX_OUTER_RETRY})๏ผŒ็ญ‰ๅพ…{delay}s...")
                            await asyncio.sleep(delay)
                            break  # leave the inner loop, continue the outer retry
                        else:
                            logger.error(f"[Token] {response.status_code}้”™่ฏฏ๏ผŒๅทฒ้‡่ฏ•{outer_retry}ๆฌก๏ผŒๆ”พๅผƒ")
                            sso = self._extract_sso(auth_token)
                            if sso:
                                if response.status_code == 401:
                                    await self.record_failure(auth_token, 401, "Tokenๅคฑๆ•ˆ")
                                else:
                                    await self.record_failure(auth_token, response.status_code, f"้”™่ฏฏ: {response.status_code}")
                            return None

                    if response.status_code == 200:
                        data = response.json()
                        sso = self._extract_sso(auth_token)

                        if outer_retry > 0 or retry_403_count > 0:
                            logger.info(f"[Token] ้‡่ฏ•ๆˆๅŠŸ๏ผ")

                        if sso:
                            # NOTE(review): the heavy path reads
                            # "remainingQueries" while the normal path reads
                            # "remainingTokens" — confirm against the API.
                            if model == "grok-4-heavy":
                                await self.update_limits(sso, normal=None, heavy=data.get("remainingQueries", -1))
                                logger.info(f"[Token] ๆ›ดๆ–ฐ้™ๅˆถ: {sso[:10]}..., heavy={data.get('remainingQueries', -1)}")
                            else:
                                await self.update_limits(sso, normal=data.get("remainingTokens", -1), heavy=None)
                                logger.info(f"[Token] ๆ›ดๆ–ฐ้™ๅˆถ: {sso[:10]}..., basic={data.get('remainingTokens', -1)}")

                        return data
                    else:
                        # Any other error: record and give up.
                        logger.warning(f"[Token] ่Žทๅ–้™ๅˆถๅคฑ่ดฅ: {response.status_code}")
                        sso = self._extract_sso(auth_token)
                        if sso:
                            await self.record_failure(auth_token, response.status_code, f"้”™่ฏฏ: {response.status_code}")
                        return None

        except Exception as e:
            logger.error(f"[Token] ๆฃ€ๆŸฅ้™ๅˆถ้”™่ฏฏ: {e}")
            return None
|
| 453 |
+
|
| 454 |
+
async def update_limits(self, sso: str, normal: Optional[int] = None, heavy: Optional[int] = None) -> None:
|
| 455 |
+
"""ๆดๆฐ้ๅถ"""
|
| 456 |
+
try:
|
| 457 |
+
for token_type in [TokenType.NORMAL.value, TokenType.SUPER.value]:
|
| 458 |
+
if sso in self.token_data[token_type]:
|
| 459 |
+
if normal is not None:
|
| 460 |
+
self.token_data[token_type][sso]["remainingQueries"] = normal
|
| 461 |
+
if heavy is not None:
|
| 462 |
+
self.token_data[token_type][sso]["heavyremainingQueries"] = heavy
|
| 463 |
+
self._mark_dirty() # ๆน้ไฟๅญ
|
| 464 |
+
logger.info(f"[Token] ๆดๆฐ้ๅถ: {sso[:10]}...")
|
| 465 |
+
return
|
| 466 |
+
logger.warning(f"[Token] ๆชๆพๅฐ: {sso[:10]}...")
|
| 467 |
+
except Exception as e:
|
| 468 |
+
logger.error(f"[Token] ๆดๆฐ้ๅถ้่ฏฏ: {e}")
|
| 469 |
+
|
| 470 |
+
    async def record_failure(self, auth_token: str, status: int, msg: str) -> None:
        """Record a failed request against the token behind *auth_token*.

        403 (IP block) is only logged — it does not count against the
        token. After MAX_FAILURES 4xx failures the token is marked expired.
        """
        try:
            if status == STATSIG_INVALID:
                # IP-level block, not the token's fault — don't penalize it.
                logger.warning("[Token] IP่ขซBlock๏ผŒ่ฏท: 1.ๆ›ดๆขIP 2.ไฝฟ็”จไปฃ็† 3.้…็ฝฎCFๅ€ผ")
                return

            sso = self._extract_sso(auth_token)
            if not sso:
                return

            _, data = self._find_token(sso)
            if not data:
                logger.warning(f"[Token] ๆœชๆ‰พๅˆฐ: {sso[:10]}...")
                return

            data["failedCount"] = data.get("failedCount", 0) + 1
            data["lastFailureTime"] = int(time.time() * 1000)
            data["lastFailureReason"] = f"{status}: {msg}"

            logger.warning(
                f"[Token] ๅคฑ่ดฅ: {sso[:10]}... (็Šถๆ€:{status}), "
                f"ๆฌกๆ•ฐ: {data['failedCount']}/{MAX_FAILURES}, ๅŽŸๅ› : {msg}"
            )

            # Repeated client errors (4xx) permanently expire the token.
            if 400 <= status < 500 and data["failedCount"] >= MAX_FAILURES:
                data["status"] = "expired"
                logger.error(f"[Token] ๆ ‡่ฎฐๅคฑๆ•ˆ: {sso[:10]}... (่ฟž็ปญ{status}้”™่ฏฏ{data['failedCount']}ๆฌก)")

            self._mark_dirty()  # batched save

        except Exception as e:
            logger.error(f"[Token] ่ฎฐๅฝ•ๅคฑ่ดฅ้”™่ฏฏ: {e}")
|
| 503 |
+
|
| 504 |
+
async def reset_failure(self, auth_token: str) -> None:
|
| 505 |
+
"""้็ฝฎๅคฑ่ดฅ่ฎกๆฐ"""
|
| 506 |
+
try:
|
| 507 |
+
sso = self._extract_sso(auth_token)
|
| 508 |
+
if not sso:
|
| 509 |
+
return
|
| 510 |
+
|
| 511 |
+
_, data = self._find_token(sso)
|
| 512 |
+
if not data:
|
| 513 |
+
return
|
| 514 |
+
|
| 515 |
+
if data.get("failedCount", 0) > 0:
|
| 516 |
+
data["failedCount"] = 0
|
| 517 |
+
data["lastFailureTime"] = None
|
| 518 |
+
data["lastFailureReason"] = None
|
| 519 |
+
self._mark_dirty() # ๆน้ไฟๅญ
|
| 520 |
+
logger.info(f"[Token] ้็ฝฎๅคฑ่ดฅ่ฎกๆฐ: {sso[:10]}...")
|
| 521 |
+
|
| 522 |
+
except Exception as e:
|
| 523 |
+
logger.error(f"[Token] ้็ฝฎๅคฑ่ดฅ้่ฏฏ: {e}")
|
| 524 |
+
|
| 525 |
+
    async def apply_cooldown(self, auth_token: str, status_code: int) -> None:
        """Apply a cooldown to the token behind *auth_token*.

        - 429: time-based cooldown (1h with quota left, 10h without).
        - Other errors: request-count cooldown (skip the next 5 selections).
        """
        try:
            sso = self._extract_sso(auth_token)
            if not sso:
                return

            _, data = self._find_token(sso)
            if not data:
                return

            remaining = data.get("remainingQueries", -1)

            if status_code == 429:
                # 429 uses a time-based cooldown
                if remaining > 0 or remaining == -1:
                    # Quota left (or unknown): cool down 1 hour
                    cooldown_until = time.time() + COOLDOWN_429_WITH_QUOTA
                    logger.info(f"[Token] 429ๅ†ทๅด(ๆœ‰้ขๅบฆ): {sso[:10]}... ๅ†ทๅด1ๅฐๆ—ถ")
                else:
                    # No quota: cool down 10 hours
                    cooldown_until = time.time() + COOLDOWN_429_NO_QUOTA
                    logger.info(f"[Token] 429ๅ†ทๅด(ๆ— ้ขๅบฆ): {sso[:10]}... ๅ†ทๅด10ๅฐๆ—ถ")
                # Stored in milliseconds; read back by select_token.
                data["cooldownUntil"] = int(cooldown_until * 1000)
                self._mark_dirty()
            else:
                # Other errors use a request-count cooldown (only while quota remains)
                if remaining != 0:
                    self._cooldown_counts[sso] = COOLDOWN_REQUESTS
                    logger.info(f"[Token] ๆฌกๆ•ฐๅ†ทๅด: {sso[:10]}... ๅ†ทๅด{COOLDOWN_REQUESTS}ๆฌก่ฏทๆฑ‚")

        except Exception as e:
            logger.error(f"[Token] ๅบ”็”จๅ†ทๅด้”™่ฏฏ: {e}")
|
| 561 |
+
|
| 562 |
+
    async def refresh_all_limits(self) -> Dict[str, Any]:
        """Refresh remaining-quota counters for every token, one at a time.

        Only one refresh may run at once; progress is exposed through
        get_refresh_progress().
        """
        # Bail out if a refresh is already running.
        if self._refresh_lock:
            return {"error": "refresh_in_progress", "message": "ๅทฒๆœ‰ๅˆทๆ–ฐไปปๅŠกๅœจ่ฟ่กŒไธญ", "progress": self._refresh_progress}

        # Acquire the (cooperative, single-event-loop) lock flag.
        self._refresh_lock = True

        try:
            # Collect every (pool, sso) pair.
            all_tokens = []
            for token_type in [TokenType.NORMAL.value, TokenType.SUPER.value]:
                for sso in list(self.token_data[token_type].keys()):
                    all_tokens.append((token_type, sso))

            total = len(all_tokens)
            self._refresh_progress = {"running": True, "current": 0, "total": total, "success": 0, "failed": 0}

            success_count = 0
            fail_count = 0

            for i, (token_type, sso) in enumerate(all_tokens):
                auth_token = f"sso-rw={sso};sso={sso}"
                try:
                    result = await self.check_limits(auth_token, "grok-4-fast")
                    if result:
                        success_count += 1
                    else:
                        fail_count += 1
                except Exception as e:
                    logger.warning(f"[Token] ๅˆทๆ–ฐๅคฑ่ดฅ: {sso[:10]}... - {e}")
                    fail_count += 1

                # Publish progress after each token.
                self._refresh_progress = {
                    "running": True,
                    "current": i + 1,
                    "total": total,
                    "success": success_count,
                    "failed": fail_count
                }
                await asyncio.sleep(0.1)  # avoid hammering the API

            logger.info(f"[Token] ๆ‰น้‡ๅˆทๆ–ฐๅฎŒๆˆ: ๆˆๅŠŸ{success_count}, ๅคฑ่ดฅ{fail_count}")
            self._refresh_progress = {"running": False, "current": total, "total": total, "success": success_count, "failed": fail_count}
            return {"success": success_count, "failed": fail_count, "total": total}

        finally:
            self._refresh_lock = False
|
| 612 |
+
|
| 613 |
+
def get_refresh_progress(self) -> Dict[str, Any]:
|
| 614 |
+
"""่ทๅๅทๆฐ่ฟๅบฆ"""
|
| 615 |
+
return self._refresh_progress.copy()
|
| 616 |
+
|
| 617 |
+
|
| 618 |
+
# Global singleton instance shared by the rest of the app
token_manager = GrokTokenManager()
|
app/services/grok/upload.py
ADDED
|
@@ -0,0 +1,209 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""ๅพ็ไธไผ ็ฎก็ๅจ - ๆฏๆBase64ๅURLๅพ็ไธไผ """
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import base64
|
| 5 |
+
import re
|
| 6 |
+
from typing import Tuple, Optional
|
| 7 |
+
from urllib.parse import urlparse
|
| 8 |
+
from curl_cffi.requests import AsyncSession
|
| 9 |
+
|
| 10 |
+
from app.services.grok.statsig import get_dynamic_headers
|
| 11 |
+
from app.core.exception import GrokApiException
|
| 12 |
+
from app.core.config import setting
|
| 13 |
+
from app.core.logger import logger
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# Constants
UPLOAD_API = "https://grok.com/rest/app-chat/upload-file"
TIMEOUT = 30            # per-request timeout, seconds
BROWSER = "chrome133a"  # curl_cffi browser impersonation profile

# File-extension -> MIME type map for common image formats
MIME_TYPES = {
    '.jpg': 'image/jpeg', '.jpeg': 'image/jpeg', '.png': 'image/png',
    '.gif': 'image/gif', '.webp': 'image/webp', '.bmp': 'image/bmp',
}
DEFAULT_MIME = "image/jpeg"  # fallback when the MIME type cannot be detected
DEFAULT_EXT = "jpg"          # fallback file extension
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class ImageUploadManager:
    """Uploads images (base64 payload or http(s) URL) to Grok's upload-file endpoint."""

    @staticmethod
    async def upload(image_input: str, auth_token: str) -> Tuple[str, str]:
        """Upload an image, given either a base64/data-URI string or a URL.

        Args:
            image_input: data URI, raw base64, or an http(s) image URL.
            auth_token: cookie-style auth token ("sso-rw=...;sso=...").

        Returns:
            ``(file_id, file_uri)`` on success, ``("", "")`` on any failure.
        """
        try:
            # Normalize the input into (base64 content, filename, mime type).
            if ImageUploadManager._is_url(image_input):
                buffer, mime = await ImageUploadManager._download(image_input)
                filename, _ = ImageUploadManager._get_info("", mime)
            else:
                # Strip a "data:image/...;base64," prefix when present.
                buffer = image_input.split(",")[1] if "data:image" in image_input else image_input
                filename, mime = ImageUploadManager._get_info(image_input)

            # Request payload expected by the upload endpoint.
            data = {
                "fileName": filename,
                "fileMimeType": mime,
                "content": buffer,
            }

            if not auth_token:
                raise GrokApiException("่ฎค่ฏไปค็็ผบๅคฑ", "NO_AUTH_TOKEN")

            # Outer retry for configurable status codes (401/429 by default).
            retry_codes = setting.grok_config.get("retry_status_codes", [401, 429])
            MAX_OUTER_RETRY = 3

            for outer_retry in range(MAX_OUTER_RETRY + 1):  # +1 so 3 real retries happen
                try:
                    # Inner retry: 403s are retried with a refreshed proxy.
                    max_403_retries = 5
                    retry_403_count = 0

                    while retry_403_count <= max_403_retries:
                        # Per-attempt request configuration.
                        cf = setting.grok_config.get("cf_clearance", "")
                        headers = {
                            **get_dynamic_headers("/rest/app-chat/upload-file"),
                            "Cookie": f"{auth_token};{cf}" if cf else auth_token,
                        }

                        # Local import to avoid a circular dependency at module load.
                        from app.core.proxy_pool import proxy_pool

                        # On 403 retries with the pool enabled, force a fresh proxy.
                        if retry_403_count > 0 and proxy_pool._enabled:
                            logger.info(f"[Upload] 403้่ฏ {retry_403_count}/{max_403_retries}๏ผๅทๆฐไปฃ็...")
                            proxy = await proxy_pool.force_refresh()
                        else:
                            proxy = await setting.get_proxy_async("service")

                        proxies = {"http": proxy, "https": proxy} if proxy else None

                        # Perform the upload.
                        async with AsyncSession() as session:
                            response = await session.post(
                                UPLOAD_API,
                                headers=headers,
                                json=data,
                                impersonate=BROWSER,
                                timeout=TIMEOUT,
                                proxies=proxies,
                            )

                        # Inner 403 retry applies only when the proxy pool is enabled.
                        if response.status_code == 403 and proxy_pool._enabled:
                            retry_403_count += 1

                            if retry_403_count <= max_403_retries:
                                logger.warning(f"[Upload] ้ๅฐ403้่ฏฏ๏ผๆญฃๅจ้่ฏ ({retry_403_count}/{max_403_retries})...")
                                await asyncio.sleep(0.5)
                                continue

                            # All inner retries exhausted; fall through to the
                            # generic status handling below.
                            logger.error(f"[Upload] 403้่ฏฏ๏ผๅทฒ้่ฏ{retry_403_count-1}ๆฌก๏ผๆพๅผ")

                        # Configurable status codes trigger the outer retry loop.
                        if response.status_code in retry_codes:
                            if outer_retry < MAX_OUTER_RETRY:
                                delay = (outer_retry + 1) * 0.1  # backoff: 0.1s, 0.2s, 0.3s
                                logger.warning(f"[Upload] ้ๅฐ{response.status_code}้่ฏฏ๏ผๅคๅฑ้่ฏ ({outer_retry+1}/{MAX_OUTER_RETRY})๏ผ็ญๅพ{delay}s...")
                                await asyncio.sleep(delay)
                                break  # leave the inner loop -> next outer iteration
                            else:
                                logger.error(f"[Upload] {response.status_code}้่ฏฏ๏ผๅทฒ้่ฏ{outer_retry}ๆฌก๏ผๆพๅผ")
                                return "", ""

                        if response.status_code == 200:
                            result = response.json()
                            file_id = result.get("fileMetadataId", "")
                            file_uri = result.get("fileUri", "")

                            if outer_retry > 0 or retry_403_count > 0:
                                logger.info(f"[Upload] ้่ฏๆๅ๏ผ")

                            logger.debug(f"[Upload] ๆๅ๏ผID: {file_id}")
                            return file_id, file_uri

                        # Any other status is a hard failure.
                        # BUGFIX: use the public status_code attribute, not the
                        # private _status_code (which may not exist on the
                        # curl_cffi Response).
                        logger.error(f"[Upload] ๅคฑ่ดฅ๏ผ็ถๆ็ : {response.status_code}")
                        return "", ""

                    # Inner loop ended without break: every 403 retry failed.
                    return "", ""

                except Exception as e:
                    # BUGFIX: retry bound aligned with the status-code path
                    # (was `MAX_OUTER_RETRY - 1`, silently wasting the final
                    # loop iteration on exceptions).
                    if outer_retry < MAX_OUTER_RETRY:
                        logger.warning(f"[Upload] ๅผๅธธ: {e}๏ผๅคๅฑ้่ฏ ({outer_retry+1}/{MAX_OUTER_RETRY})...")
                        await asyncio.sleep(0.5)
                        continue

                    logger.warning(f"[Upload] ๅคฑ่ดฅ: {e}")
                    return "", ""

            return "", ""

        except Exception as e:
            logger.warning(f"[Upload] ๅคฑ่ดฅ: {e}")
            return "", ""

    @staticmethod
    def _is_url(input_str: str) -> bool:
        """Return True when *input_str* is a well-formed http(s) URL."""
        try:
            result = urlparse(input_str)
            return all([result.scheme, result.netloc]) and result.scheme in ['http', 'https']
        except Exception:
            # BUGFIX: narrow the bare `except:` so SystemExit/KeyboardInterrupt
            # are no longer swallowed.
            return False

    @staticmethod
    async def _download(url: str) -> Tuple[str, str]:
        """Download an image and return it base64-encoded.

        Returns:
            ``(base64_string, mime_type)``, or ``("", "")`` on failure.
        """
        try:
            async with AsyncSession() as session:
                response = await session.get(url, timeout=5)
                response.raise_for_status()

                # Fall back to JPEG when the server reports a non-image type.
                content_type = response.headers.get('content-type', DEFAULT_MIME)
                if not content_type.startswith('image/'):
                    content_type = DEFAULT_MIME

                b64 = base64.b64encode(response.content).decode()
                return b64, content_type
        except Exception as e:
            logger.warning(f"[Upload] ไธ่ฝฝๅคฑ่ดฅ: {e}")
            return "", ""

    @staticmethod
    def _get_info(image_data: str, mime_type: Optional[str] = None) -> Tuple[str, str]:
        """Derive a file name and MIME type.

        Args:
            image_data: data-URI / base64 string (may be empty when
                *mime_type* is supplied).
            mime_type: explicit MIME type, if already known.

        Returns:
            ``(file_name, mime_type)``.
        """
        # An explicit MIME type wins.
        if mime_type:
            ext = mime_type.split("/")[1] if "/" in mime_type else DEFAULT_EXT
            return f"image.{ext}", mime_type

        # Otherwise extract from a base64 data URI, defaulting to JPEG.
        mime = DEFAULT_MIME
        ext = DEFAULT_EXT

        if "data:image" in image_data:
            if match := re.search(r"data:([a-zA-Z0-9]+/[a-zA-Z0-9-.+]+);base64,", image_data):
                mime = match.group(1)
                ext = mime.split("/")[1]

        return f"image.{ext}", mime
|
app/services/mcp/__init__.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""MCPๆจกๅๅๅงๅ"""
|
| 3 |
+
|
| 4 |
+
from app.services.mcp.server import mcp
|
| 5 |
+
|
| 6 |
+
__all__ = ["mcp"]
|
app/services/mcp/server.py
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""FastMCPๆๅกๅจๅฎไพ"""
|
| 3 |
+
|
| 4 |
+
from fastmcp import FastMCP
|
| 5 |
+
from fastmcp.server.auth.providers.jwt import StaticTokenVerifier
|
| 6 |
+
from app.services.mcp.tools import ask_grok_impl
|
| 7 |
+
from app.core.config import setting
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def create_mcp_server() -> FastMCP:
    """Create the FastMCP server, enabling static-token auth when an API key is configured."""
    # Bearer-token verification is only wired up when an API key exists.
    auth = None
    if api_key := setting.grok_config.get("api_key"):
        auth = StaticTokenVerifier(
            tokens={
                api_key: {
                    "client_id": "grok2api-client",
                    "scopes": ["read", "write", "admin"],
                },
            },
            required_scopes=["read"],
        )

    # Build and return the FastMCP instance.
    return FastMCP(
        name="Grok2API-MCP",
        instructions="MCP server providing Grok AI chat capabilities. Use ask_grok tool to interact with Grok AI models.",
        auth=auth,
    )


# Module-level MCP instance shared by the app
mcp = create_mcp_server()
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
# Register the ask_grok tool on the module-level MCP server
@mcp.tool
async def ask_grok(
    query: str,
    model: str = "grok-3-fast",
    system_prompt: str = None
) -> str:
    """
    Chat with Grok AI. Especially useful when the user asks about current
    information (search is invoked) or wants updates from social platforms
    such as Twitter(X) or Reddit.

    Args:
        query: The user's question or instruction
        model: Grok model name. Options: grok-3-fast (default), grok-4-fast,
            grok-4-fast-expert, grok-4-expert, grok-4-heavy
        system_prompt: Optional system prompt that constrains the AI's role
            or behavior

    Returns:
        Grok AI's complete reply; may include text and image links
        (Markdown format)

    Examples:
        - Simple Q&A: ask_grok("What is Python?")
        - Specific model: ask_grok("Explain quantum computing", model="grok-4-fast")
        - With system prompt: ask_grok("Write a poem", system_prompt="You are a classical poet")
    """
    return await ask_grok_impl(query, model, system_prompt)
|
app/services/mcp/tools.py
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""MCP Tools - Grok AI ๅฏน่ฏๅทฅๅ
ท"""
|
| 3 |
+
|
| 4 |
+
import json
|
| 5 |
+
from typing import Optional
|
| 6 |
+
from app.services.grok.client import GrokClient
|
| 7 |
+
from app.core.logger import logger
|
| 8 |
+
from app.core.exception import GrokApiException
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
async def ask_grok_impl(
    query: str,
    model: str = "grok-3-fast",
    system_prompt: Optional[str] = None
) -> str:
    """
    Internal implementation: call the Grok API and collect the full response.

    Args:
        query: the user's question
        model: model name
        system_prompt: optional system prompt

    Returns:
        str: the complete Grok response content

    Raises:
        Exception: wraps any GrokApiException or unexpected error; the
            original exception is preserved as ``__cause__``.
    """
    try:
        # Build the OpenAI-style message list.
        messages = []
        if system_prompt:
            messages.append({"role": "system", "content": system_prompt})
        messages.append({"role": "user", "content": query})

        # Build the request payload (streaming).
        request_data = {
            "model": model,
            "messages": messages,
            "stream": True
        }

        logger.info(f"[MCP] ask_grok ่ฐ็จ, ๆจกๅ: {model}")

        # Call the Grok client (streaming).
        response_iterator = await GrokClient.openai_to_grok(request_data)

        # Collect every streamed content delta.
        content_parts = []
        done = False
        async for chunk in response_iterator:
            if isinstance(chunk, bytes):
                chunk = chunk.decode('utf-8')

            # Parse SSE format. ROBUSTNESS FIX: a single chunk may carry
            # several "data:" lines; the previous per-chunk startswith check
            # silently dropped everything after the first event.
            for line in chunk.splitlines():
                if not line.startswith("data: "):
                    continue
                data_str = line[6:].strip()
                if data_str == "[DONE]":
                    done = True
                    break

                try:
                    data = json.loads(data_str)
                    choices = data.get("choices", [])
                    if choices:
                        delta = choices[0].get("delta", {})
                        if content := delta.get("content"):
                            content_parts.append(content)
                except json.JSONDecodeError:
                    continue

            if done:
                break

        result = "".join(content_parts)
        logger.info(f"[MCP] ask_grok ๅฎๆ, ๅๅบ้ฟๅบฆ: {len(result)}")
        return result

    except GrokApiException as e:
        logger.error(f"[MCP] Grok API้่ฏฏ: {str(e)}")
        # FIX: chain the cause so the original traceback is not lost.
        raise Exception(f"Grok API่ฐ็จๅคฑ่ดฅ: {str(e)}") from e
    except Exception as e:
        logger.error(f"[MCP] ask_grokๅผๅธธ: {str(e)}", exc_info=True)
        raise Exception(f"ๅค็่ฏทๆฑๆถๅบ้: {str(e)}") from e
|
app/services/request_logger.py
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""่ฏทๆฑๆฅๅฟๅฎก่ฎก - ่ฎฐๅฝ่ฟๆ่ฏทๆฑ"""
|
| 2 |
+
|
| 3 |
+
import time
|
| 4 |
+
import asyncio
|
| 5 |
+
import orjson
|
| 6 |
+
from typing import List, Dict, Deque
|
| 7 |
+
from collections import deque
|
| 8 |
+
from dataclasses import dataclass, asdict
|
| 9 |
+
from pathlib import Path
|
| 10 |
+
|
| 11 |
+
from app.core.logger import logger
|
| 12 |
+
|
| 13 |
+
@dataclass
class RequestLog:
    """One audit-log entry for a single API request."""
    id: str            # unique id (millisecond-timestamp string)
    time: str          # human-readable local time "YYYY-MM-DD HH:MM:SS"
    timestamp: float   # unix epoch seconds
    ip: str            # client IP address
    model: str         # model name requested
    duration: float    # request duration in seconds
    status: int        # HTTP status code returned
    key_name: str      # name of the API key used
    token_suffix: str  # suffix of the upstream token used
    error: str = ""    # error message; empty on success
|
| 25 |
+
|
| 26 |
+
class RequestLogger:
    """In-memory request audit log, persisted to ``data/logs.json`` (singleton)."""

    _instance = None

    def __new__(cls):
        # Classic singleton: every construction returns the same instance.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self, max_len: int = 1000):
        # Guard against re-running __init__ on the shared instance.
        if hasattr(self, '_initialized'):
            return

        self.file_path = Path(__file__).parents[2] / "data" / "logs.json"
        self._logs: Deque[Dict] = deque(maxlen=max_len)  # newest entry first
        self._lock = asyncio.Lock()
        self._loaded = False
        # BUGFIX: keep strong references to in-flight background save tasks.
        # The event loop only holds weak references, so an unreferenced
        # create_task() result can be garbage-collected before it runs
        # (see asyncio.create_task documentation).
        self._bg_tasks: set = set()

        self._initialized = True

    def _spawn_save(self) -> None:
        """Schedule a background save, retaining a strong reference to the task."""
        task = asyncio.create_task(self._save_data())
        self._bg_tasks.add(task)
        task.add_done_callback(self._bg_tasks.discard)

    async def init(self):
        """Load persisted data on first use."""
        if not self._loaded:
            await self._load_data()

    async def _load_data(self):
        """Load log entries from disk (idempotent)."""
        if self._loaded:
            return

        if not self.file_path.exists():
            self._loaded = True
            return

        try:
            async with self._lock:
                content = await asyncio.to_thread(self.file_path.read_bytes)
                if content:
                    data = orjson.loads(content)
                    if isinstance(data, list):
                        self._logs.clear()
                        self._logs.extend(data)
            self._loaded = True
            logger.debug(f"[Logger] ๅ ่ฝฝๆฅๅฟๆๅ: {len(self._logs)} ๆก")
        except Exception as e:
            logger.error(f"[Logger] ๅ ่ฝฝๆฅๅฟๅคฑ่ดฅ: {e}")
            self._loaded = True

    async def _save_data(self):
        """Persist the current log buffer to disk."""
        if not self._loaded:
            return

        try:
            # Make sure the data directory exists.
            self.file_path.parent.mkdir(parents=True, exist_ok=True)

            async with self._lock:
                # Serialize a plain list snapshot of the deque.
                content = orjson.dumps(list(self._logs))
                await asyncio.to_thread(self.file_path.write_bytes, content)
        except Exception as e:
            logger.error(f"[Logger] ไฟๅญๆฅๅฟๅคฑ่ดฅ: {e}")

    async def add_log(self,
                      ip: str,
                      model: str,
                      duration: float,
                      status: int,
                      key_name: str,
                      token_suffix: str = "",
                      error: str = ""):
        """Append one log entry (newest first) and persist asynchronously."""
        if not self._loaded:
            await self.init()

        try:
            now = time.time()
            # Human-readable local time for display.
            time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(now))

            log = {
                "id": str(int(now * 1000)),
                "time": time_str,
                "timestamp": now,
                "ip": ip,
                "model": model,
                "duration": round(duration, 2),
                "status": status,
                "key_name": key_name,
                "token_suffix": token_suffix,
                "error": error
            }

            async with self._lock:
                self._logs.appendleft(log)  # newest entries first

            # Fire-and-forget save, with the task reference retained (see __init__).
            self._spawn_save()

        except Exception as e:
            logger.error(f"[Logger] ่ฎฐๅฝๆฅๅฟๅคฑ่ดฅ: {e}")

    async def get_logs(self, limit: int = 1000) -> List[Dict]:
        """Return up to *limit* newest entries."""
        async with self._lock:
            return list(self._logs)[:limit]

    async def clear_logs(self):
        """Drop all entries and persist the empty list."""
        async with self._lock:
            self._logs.clear()
        await self._save_data()
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
# Global singleton instance shared by the rest of the app
request_logger = RequestLogger()
|
app/services/request_stats.py
ADDED
|
@@ -0,0 +1,205 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""่ฏทๆฑ็ป่ฎกๆจกๅ - ๆๅฐๆถ/ๅคฉ็ป่ฎก่ฏทๆฑๆฐๆฎ"""
|
| 2 |
+
|
| 3 |
+
import time
|
| 4 |
+
import asyncio
|
| 5 |
+
import orjson
|
| 6 |
+
from datetime import datetime
|
| 7 |
+
from typing import Dict, Any
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
from collections import defaultdict
|
| 10 |
+
|
| 11 |
+
from app.core.logger import logger
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class RequestStats:
    """Per-hour / per-day request statistics manager (singleton)."""

    _instance = None

    def __new__(cls):
        # Classic singleton: every construction returns the same instance.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        # Guard against re-running __init__ on the shared instance.
        if hasattr(self, '_initialized'):
            return

        self.file_path = Path(__file__).parents[2] / "data" / "stats.json"

        # Counters: "YYYY-MM-DDTHH" -> totals, "YYYY-MM-DD" -> totals,
        # model name -> request count.
        self._hourly: Dict[str, Dict[str, int]] = defaultdict(lambda: {"total": 0, "success": 0, "failed": 0})
        self._daily: Dict[str, Dict[str, int]] = defaultdict(lambda: {"total": 0, "success": 0, "failed": 0})
        self._models: Dict[str, int] = defaultdict(int)

        # Retention policy.
        self._hourly_keep = 48  # keep 48 hours
        self._daily_keep = 30   # keep 30 days

        self._lock = asyncio.Lock()
        self._loaded = False
        # BUGFIX: retain strong references to background save tasks so they
        # cannot be garbage-collected before running (asyncio.create_task docs).
        self._bg_tasks: set = set()
        self._initialized = True

    def _spawn_save(self) -> None:
        """Schedule a background save, retaining a strong reference to the task."""
        task = asyncio.create_task(self._save_data())
        self._bg_tasks.add(task)
        task.add_done_callback(self._bg_tasks.discard)

    async def init(self):
        """Load persisted data on first use."""
        if not self._loaded:
            await self._load_data()

    async def _load_data(self):
        """Load statistics from disk (idempotent)."""
        if self._loaded:
            return

        if not self.file_path.exists():
            self._loaded = True
            return

        try:
            async with self._lock:
                content = await asyncio.to_thread(self.file_path.read_bytes)
                if content:
                    data = orjson.loads(content)

                    # Restore the defaultdict structure.
                    self._hourly = defaultdict(lambda: {"total": 0, "success": 0, "failed": 0})
                    self._hourly.update(data.get("hourly", {}))

                    self._daily = defaultdict(lambda: {"total": 0, "success": 0, "failed": 0})
                    self._daily.update(data.get("daily", {}))

                    self._models = defaultdict(int)
                    self._models.update(data.get("models", {}))

            self._loaded = True
            logger.debug(f"[Stats] ๅ ่ฝฝ็ป่ฎกๆฐๆฎๆๅ")
        except Exception as e:
            logger.error(f"[Stats] ๅ ่ฝฝๆฐๆฎๅคฑ่ดฅ: {e}")
            self._loaded = True  # avoid clobbering the file with empty data

    async def _save_data(self):
        """Persist the statistics to disk."""
        if not self._loaded:
            return

        try:
            # Make sure the data directory exists.
            self.file_path.parent.mkdir(parents=True, exist_ok=True)

            async with self._lock:
                data = {
                    "hourly": dict(self._hourly),
                    "daily": dict(self._daily),
                    "models": dict(self._models)
                }
                content = orjson.dumps(data)
                await asyncio.to_thread(self.file_path.write_bytes, content)
        except Exception as e:
            logger.error(f"[Stats] ไฟๅญๆฐๆฎๅคฑ่ดฅ: {e}")

    async def record_request(self, model: str, success: bool) -> None:
        """Record one request outcome in the hourly/daily/model counters."""
        if not self._loaded:
            await self.init()

        now = datetime.now()
        hour_key = now.strftime("%Y-%m-%dT%H")
        day_key = now.strftime("%Y-%m-%d")

        # Hourly counters.
        self._hourly[hour_key]["total"] += 1
        if success:
            self._hourly[hour_key]["success"] += 1
        else:
            self._hourly[hour_key]["failed"] += 1

        # Daily counters.
        self._daily[day_key]["total"] += 1
        if success:
            self._daily[day_key]["success"] += 1
        else:
            self._daily[day_key]["failed"] += 1

        # Per-model counter.
        self._models[model] += 1

        # Evict expired buckets.
        self._cleanup()

        # Fire-and-forget save, with the task reference retained (see __init__).
        self._spawn_save()

    def _cleanup(self) -> None:
        """Evict hourly/daily buckets beyond the retention window."""
        # Trim hourly buckets (keys sort chronologically).
        hour_keys = list(self._hourly.keys())
        if len(hour_keys) > self._hourly_keep:
            for key in sorted(hour_keys)[:-self._hourly_keep]:
                del self._hourly[key]

        # Trim daily buckets.
        day_keys = list(self._daily.keys())
        if len(day_keys) > self._daily_keep:
            for key in sorted(day_keys)[:-self._daily_keep]:
                del self._daily[key]

    def get_stats(self, hours: int = 24, days: int = 7) -> Dict[str, Any]:
        """Return hourly/daily series, top-10 models, and a summary."""
        # IDIOM FIX: import once here instead of inside each loop iteration.
        from datetime import timedelta

        now = datetime.now()

        # Last N hours, oldest first; missing buckets are zero-filled.
        hourly_data = []
        for i in range(hours - 1, -1, -1):
            dt = now - timedelta(hours=i)
            key = dt.strftime("%Y-%m-%dT%H")
            data = self._hourly.get(key, {"total": 0, "success": 0, "failed": 0})
            hourly_data.append({
                "hour": dt.strftime("%H:00"),
                "date": dt.strftime("%m-%d"),
                **data
            })

        # Last N days, oldest first; missing buckets are zero-filled.
        daily_data = []
        for i in range(days - 1, -1, -1):
            dt = now - timedelta(days=i)
            key = dt.strftime("%Y-%m-%d")
            data = self._daily.get(key, {"total": 0, "success": 0, "failed": 0})
            daily_data.append({
                "date": dt.strftime("%m-%d"),
                **data
            })

        # Top-10 models by request count.
        model_data = sorted(self._models.items(), key=lambda x: x[1], reverse=True)[:10]

        # Totals over all retained hourly buckets.
        total_requests = sum(d["total"] for d in self._hourly.values())
        total_success = sum(d["success"] for d in self._hourly.values())
        total_failed = sum(d["failed"] for d in self._hourly.values())

        return {
            "hourly": hourly_data,
            "daily": daily_data,
            "models": [{"model": m, "count": c} for m, c in model_data],
            "summary": {
                "total": total_requests,
                "success": total_success,
                "failed": total_failed,
                "success_rate": round(total_success / total_requests * 100, 1) if total_requests > 0 else 0
            }
        }

    async def reset(self) -> None:
        """Clear all statistics and persist the empty state."""
        self._hourly.clear()
        self._daily.clear()
        self._models.clear()
        await self._save_data()
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
# Global singleton instance shared by the rest of the app
request_stats = RequestStats()
|
app/template/admin.html
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
app/template/favicon.png
ADDED
|
|
Git LFS Details
|
app/template/login.html
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<!DOCTYPE html>
|
| 2 |
+
<html lang="zh-CN" class="h-full">
|
| 3 |
+
|
| 4 |
+
<head>
|
| 5 |
+
<meta charset="UTF-8">
|
| 6 |
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
| 7 |
+
<title>็ปๅฝ - Grok2API</title>
|
| 8 |
+
<link rel="icon" type="image/png" href="/static/favicon.png">
|
| 9 |
+
<script src="https://cdn.tailwindcss.com"></script>
|
| 10 |
+
<script>
|
| 11 |
+
tailwind.config = { theme: { extend: { colors: { border: "hsl(0 0% 89%)", input: "hsl(0 0% 89%)", ring: "hsl(0 0% 3.9%)", background: "hsl(0 0% 100%)", foreground: "hsl(0 0% 3.9%)", primary: { DEFAULT: "hsl(0 0% 9%)", foreground: "hsl(0 0% 98%)" }, secondary: { DEFAULT: "hsl(0 0% 96.1%)", foreground: "hsl(0 0% 9%)" }, muted: { DEFAULT: "hsl(0 0% 96.1%)", foreground: "hsl(0 0% 45.1%)" }, destructive: { DEFAULT: "hsl(0 84.2% 60.2%)", foreground: "hsl(0 0% 98%)" } } } } }
|
| 12 |
+
</script>
|
| 13 |
+
<style>
|
| 14 |
+
@keyframes slide-up {
|
| 15 |
+
from {
|
| 16 |
+
transform: translateY(100%);
|
| 17 |
+
opacity: 0
|
| 18 |
+
}
|
| 19 |
+
|
| 20 |
+
to {
|
| 21 |
+
transform: translateY(0);
|
| 22 |
+
opacity: 1
|
| 23 |
+
}
|
| 24 |
+
}
|
| 25 |
+
|
| 26 |
+
.animate-slide-up {
|
| 27 |
+
animation: slide-up .3s ease-out
|
| 28 |
+
}
|
| 29 |
+
</style>
|
| 30 |
+
</head>
|
| 31 |
+
|
| 32 |
+
<body class="h-full bg-background text-foreground antialiased">
|
| 33 |
+
<div class="flex min-h-full flex-col justify-center py-12 px-4 sm:px-6 lg:px-8">
|
| 34 |
+
<div class="sm:mx-auto sm:w-full sm:max-w-md">
|
| 35 |
+
<div class="text-center">
|
| 36 |
+
<h1 class="text-4xl font-bold">Grok2API</h1>
|
| 37 |
+
<p class="mt-2 text-sm text-muted-foreground">็ฎก็ๅๆงๅถๅฐ</p>
|
| 38 |
+
</div>
|
| 39 |
+
</div>
|
| 40 |
+
|
| 41 |
+
<div class="sm:mx-auto sm:w-full sm:max-w-md">
|
| 42 |
+
<div class="bg-background py-8 px-4 sm:px-10 rounded-lg">
|
| 43 |
+
<form id="loginForm" class="space-y-6">
|
| 44 |
+
<div class="space-y-2">
|
| 45 |
+
<label for="username" class="text-sm font-medium">่ดฆๆท</label>
|
| 46 |
+
<input type="text" id="username" name="username" required
|
| 47 |
+
class="flex h-10 w-full rounded-md border border-input bg-background px-3 py-2 text-sm placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring disabled:opacity-50"
|
| 48 |
+
placeholder="่ฏท่พๅ
ฅ่ดฆๆท">
|
| 49 |
+
</div>
|
| 50 |
+
<div class="space-y-2">
|
| 51 |
+
<label for="password" class="text-sm font-medium">ๅฏ็ </label>
|
| 52 |
+
<input type="password" id="password" name="password" required
|
| 53 |
+
class="flex h-10 w-full rounded-md border border-input bg-background px-3 py-2 text-sm placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring disabled:opacity-50"
|
| 54 |
+
placeholder="่ฏท่พๅ
ฅๅฏ็ ">
|
| 55 |
+
</div>
|
| 56 |
+
<button type="submit" id="loginButton"
|
| 57 |
+
class="inline-flex items-center justify-center rounded-md font-medium transition-colors bg-primary text-primary-foreground hover:bg-primary/90 h-10 w-full disabled:opacity-50">็ปๅฝ</button>
|
| 58 |
+
</form>
|
| 59 |
+
|
| 60 |
+
<div class="mt-6 text-center text-xs text-muted-foreground space-y-1">
|
| 61 |
+
<p>Created By Chenyme ยฉ 2025</p>
|
| 62 |
+
<p>Fork ็ปดๆค: @Tomiya233</p>
|
| 63 |
+
</div>
|
| 64 |
+
</div>
|
| 65 |
+
</div>
|
| 66 |
+
</div>
|
| 67 |
+
|
| 68 |
+
<script>
|
| 69 |
+
const form = document.getElementById('loginForm'), btn = document.getElementById('loginButton');
|
| 70 |
+
form.addEventListener('submit', async (e) => { e.preventDefault(); btn.disabled = true; btn.textContent = '็ปๅฝไธญ...'; try { const fd = new FormData(form), r = await fetch('/api/login', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ username: fd.get('username'), password: fd.get('password') }) }); const d = await r.json(); d.success ? (localStorage.setItem('adminToken', d.token), location.href = '/manage') : showToast(d.message || '็ปๅฝๅคฑ่ดฅ', 'error') } catch (e) { showToast('็ฝ็ป้่ฏฏ๏ผ่ฏท็จๅ้่ฏ', 'error') } finally { btn.disabled = false; btn.textContent = '็ปๅฝ' } });
|
| 71 |
+
function showToast(m, t = 'error') { const d = document.createElement('div'), bc = { success: 'bg-green-600', error: 'bg-destructive', info: 'bg-primary' }; d.className = `fixed bottom-4 right-4 ${bc[t] || bc.error} text-white px-4 py-2.5 rounded-lg shadow-lg text-sm font-medium z-50 animate-slide-up`; d.textContent = m; document.body.appendChild(d); setTimeout(() => { d.style.opacity = '0'; d.style.transition = 'opacity .3s'; setTimeout(() => d.parentNode && document.body.removeChild(d), 300) }, 2000) }
|
| 72 |
+
window.addEventListener('DOMContentLoaded', () => { const t = localStorage.getItem('adminToken'); t && fetch('/api/stats', { headers: { Authorization: `Bearer ${t}` } }).then(r => { if (r.ok) location.href = '/manage' }) });
|
| 73 |
+
</script>
|
| 74 |
+
</body>
|
| 75 |
+
|
| 76 |
+
</html>
|
data/setting.toml
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[grok]
|
| 2 |
+
api_key = ""
|
| 3 |
+
proxy_url = ""
|
| 4 |
+
cache_proxy_url = ""
|
| 5 |
+
cf_clearance = ""
|
| 6 |
+
x_statsig_id = "ZTpUeXBlRXJyb3I6IENhbm5vdCByZWFkIHByb3BlcnRpZXMgb2YgdW5kZWZpbmVkIChyZWFkaW5nICdjaGlsZE5vZGVzJyk="
|
| 7 |
+
filtered_tags = "xaiartifact,xai:tool_usage_card,grok:render"
|
| 8 |
+
stream_chunk_timeout = 120
|
| 9 |
+
stream_total_timeout = 600
|
| 10 |
+
stream_first_response_timeout = 30
|
| 11 |
+
temporary = true
|
| 12 |
+
show_thinking = true
|
| 13 |
+
dynamic_statsig = true
|
| 14 |
+
proxy_pool_url = ""
|
| 15 |
+
proxy_pool_interval = 300
|
| 16 |
+
retry_status_codes = [ 401, 429,]
|
| 17 |
+
|
| 18 |
+
[global]
|
| 19 |
+
base_url = ""
|
| 20 |
+
log_level = "INFO"
|
| 21 |
+
image_mode = "url"
|
| 22 |
+
admin_password = "admin"
|
| 23 |
+
admin_username = "admin"
|
| 24 |
+
image_cache_max_size_mb = 512
|
| 25 |
+
video_cache_max_size_mb = 1024
|
data/temp/image.temp
ADDED
|
File without changes
|
data/temp/video/users-8522ce45-679b-4e0e-a0f7-bb18f434eb6b-generated-15f7113f-5d16-4ff1-bdaa-a2eabd66671c-generated_video.mp4
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:9fc94cc9cb116f2e561baa62d006d5a8c96a30dc7671acbcbcf7f4b6de11696a
|
| 3 |
+
size 313620
|
data/token.json
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"ssoSuper": {},
|
| 3 |
+
"ssoNormal": {}
|
| 4 |
+
}
|
docker-compose.yml
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
services:
|
| 2 |
+
grok2api:
|
| 3 |
+
image: ghcr.io/chenyme/grok2api:latest
|
| 4 |
+
ports:
|
| 5 |
+
- "8000:8000"
|
| 6 |
+
volumes:
|
| 7 |
+
- grok_data:/app/data
|
| 8 |
+
- ./logs:/app/logs
|
| 9 |
+
environment:
|
| 10 |
+
# ===== ๅญๅจๆจกๅผ =====
|
| 11 |
+
# ๆฏๆ file, mysql ๆ redis
|
| 12 |
+
- STORAGE_MODE=file
|
| 13 |
+
|
| 14 |
+
# ===== ๆฐๆฎๅบ =====
|
| 15 |
+
# ไป
ๅจSTORAGE_MODE=mysqlๆredisๆถ้่ฆ
|
| 16 |
+
# - DATABASE_URL=mysql://user:password@host:3306/grok2api
|
| 17 |
+
# MySQLๆ ผๅผ: mysql://user:password@host:port/database
|
| 18 |
+
# Redisๆ ผๅผ: redis://host:port/db ๆ redis://user:password@host:port/db
|
| 19 |
+
|
| 20 |
+
# ===== Workerๆฐ้ =====
|
| 21 |
+
# ้ป่ฎค1๏ผๅปบ่ฎฎ๏ผCPUๆ ธๅฟๆฐ * 2๏ผๅค่ฟ็จๆจกๅผไธๅปบ่ฎฎไฝฟ็จMySQL/Redisๅญๅจ
|
| 22 |
+
- WORKERS=1
|
| 23 |
+
|
| 24 |
+
volumes:
|
| 25 |
+
grok_data:
|
docker-entrypoint.sh
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/sh
|
| 2 |
+
set -e
|
| 3 |
+
|
| 4 |
+
# ๅๅงๅ้
็ฝฎๆไปถ๏ผๅฆๆไธๅญๅจ๏ผ
|
| 5 |
+
echo "[Grok2API] ๆฃๆฅ้
็ฝฎๆไปถ..."
|
| 6 |
+
|
| 7 |
+
# ็กฎไฟๆฐๆฎ็ฎๅฝๅญๅจ
|
| 8 |
+
mkdir -p /app/data/temp/image /app/data/temp/video /app/logs
|
| 9 |
+
|
| 10 |
+
# ๅฆๆ setting.toml ไธๅญๅจ๏ผๅๅปบ้ป่ฎค้
็ฝฎ
|
| 11 |
+
if [ ! -f /app/data/setting.toml ]; then
|
| 12 |
+
echo "[Grok2API] ๅๅงๅ setting.toml..."
|
| 13 |
+
cat > /app/data/setting.toml << 'EOF'
|
| 14 |
+
[global]
|
| 15 |
+
base_url = "http://localhost:8000"
|
| 16 |
+
log_level = "INFO"
|
| 17 |
+
image_mode = "url"
|
| 18 |
+
admin_password = "admin"
|
| 19 |
+
admin_username = "admin"
|
| 20 |
+
image_cache_max_size_mb = 512
|
| 21 |
+
video_cache_max_size_mb = 1024
|
| 22 |
+
max_upload_concurrency = 20
|
| 23 |
+
max_request_concurrency = 50
|
| 24 |
+
batch_save_interval = 1.0
|
| 25 |
+
batch_save_threshold = 10
|
| 26 |
+
|
| 27 |
+
[grok]
|
| 28 |
+
api_key = ""
|
| 29 |
+
proxy_url = ""
|
| 30 |
+
cache_proxy_url = ""
|
| 31 |
+
cf_clearance = ""
|
| 32 |
+
x_statsig_id = ""
|
| 33 |
+
dynamic_statsig = true
|
| 34 |
+
filtered_tags = "xaiartifact,xai:tool_usage_card,grok:render"
|
| 35 |
+
stream_chunk_timeout = 120
|
| 36 |
+
stream_total_timeout = 600
|
| 37 |
+
stream_first_response_timeout = 30
|
| 38 |
+
temporary = true
|
| 39 |
+
show_thinking = true
|
| 40 |
+
proxy_pool_url = ""
|
| 41 |
+
proxy_pool_interval = 300
|
| 42 |
+
retry_status_codes = [401, 429]
|
| 43 |
+
EOF
|
| 44 |
+
fi
|
| 45 |
+
|
| 46 |
+
# ๅฆๆ token.json ไธๅญๅจ๏ผๅๅปบ็ฉบtokenๆไปถ
|
| 47 |
+
if [ ! -f /app/data/token.json ]; then
|
| 48 |
+
echo "[Grok2API] ๅๅงๅ token.json..."
|
| 49 |
+
echo '{"ssoNormal": {}, "ssoSuper": {}}' > /app/data/token.json
|
| 50 |
+
fi
|
| 51 |
+
|
| 52 |
+
echo "[Grok2API] ้
็ฝฎๆไปถๆฃๆฅๅฎๆ"
|
| 53 |
+
echo "[Grok2API] ๅฏๅจๅบ็จ..."
|
| 54 |
+
|
| 55 |
+
# ๆง่กไผ ๅ
ฅ็ๅฝไปค
|
| 56 |
+
exec "$@"
|
main.py
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Grok2API"""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
from contextlib import asynccontextmanager
|
| 6 |
+
from pathlib import Path
|
| 7 |
+
|
| 8 |
+
from fastapi import FastAPI
|
| 9 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 10 |
+
from fastapi.staticfiles import StaticFiles
|
| 11 |
+
from app.core.logger import logger
|
| 12 |
+
from app.core.exception import register_exception_handlers
|
| 13 |
+
from app.core.storage import storage_manager
|
| 14 |
+
from app.core.config import setting
|
| 15 |
+
from app.services.grok.token import token_manager
|
| 16 |
+
from app.api.v1.chat import router as chat_router
|
| 17 |
+
from app.api.v1.models import router as models_router
|
| 18 |
+
from app.api.v1.images import router as images_router
|
| 19 |
+
from app.api.admin.manage import router as admin_router
|
| 20 |
+
from app.services.mcp import mcp
|
| 21 |
+
|
| 22 |
+
# 0. ๅ
ผๅฎนๆงๆฃๆต
|
| 23 |
+
try:
|
| 24 |
+
if sys.platform != 'win32':
|
| 25 |
+
import uvloop
|
| 26 |
+
uvloop.install()
|
| 27 |
+
logger.info("[Grok2API] ๅฏ็จuvloop้ซๆง่ฝไบไปถๅพช็ฏ")
|
| 28 |
+
else:
|
| 29 |
+
logger.info("[Grok2API] Windows็ณป็ป๏ผไฝฟ็จ้ป่ฎคasyncioไบไปถๅพช็ฏ")
|
| 30 |
+
except ImportError:
|
| 31 |
+
logger.info("[Grok2API] uvloopๆชๅฎ่ฃ
๏ผไฝฟ็จ้ป่ฎคasyncioไบไปถๅพช็ฏ")
|
| 32 |
+
|
| 33 |
+
# 1. ๅๅปบMCP็FastAPIๅบ็จๅฎไพ
|
| 34 |
+
mcp_app = mcp.http_app(stateless_http=True, transport="streamable-http")
|
| 35 |
+
|
| 36 |
+
# 2. ๅฎไนๅบ็จ็ๅฝๅจๆ
|
| 37 |
+
@asynccontextmanager
|
| 38 |
+
async def lifespan(app: FastAPI):
|
| 39 |
+
"""
|
| 40 |
+
ๅฏๅจ้กบๅบ:
|
| 41 |
+
1. ๅๅงๅๆ ธๅฟๆๅก (storage, settings, token_manager)
|
| 42 |
+
2. ๅผๆญฅๅ ่ฝฝ token ๆฐๆฎ
|
| 43 |
+
3. ๅฏๅจๆน้ไฟๅญไปปๅก
|
| 44 |
+
4. ๅฏๅจMCPๆๅก็ๅฝๅจๆ
|
| 45 |
+
|
| 46 |
+
ๅ
ณ้ญ้กบๅบ (LIFO):
|
| 47 |
+
1. ๅ
ณ้ญMCPๆๅก็ๅฝๅจๆ
|
| 48 |
+
2. ๅ
ณ้ญๆน้ไฟๅญไปปๅกๅนถๅทๆฐๆฐๆฎ
|
| 49 |
+
3. ๅ
ณ้ญๆ ธๅฟๆๅก
|
| 50 |
+
"""
|
| 51 |
+
# --- ๅฏๅจ่ฟ็จ ---
|
| 52 |
+
# 1. ๅๅงๅๆ ธๅฟๆๅก
|
| 53 |
+
await storage_manager.init()
|
| 54 |
+
|
| 55 |
+
# ่ฎพ็ฝฎๅญๅจๅฐ้
็ฝฎๅtoken็ฎก็ๅจ
|
| 56 |
+
storage = storage_manager.get_storage()
|
| 57 |
+
setting.set_storage(storage)
|
| 58 |
+
token_manager.set_storage(storage)
|
| 59 |
+
|
| 60 |
+
# 2. ้ๆฐๅ ่ฝฝ้
็ฝฎ
|
| 61 |
+
await setting.reload()
|
| 62 |
+
logger.info("[Grok2API] ๆ ธๅฟๆๅกๅๅงๅๅฎๆ")
|
| 63 |
+
|
| 64 |
+
# 2.5. ๅๅงๅไปฃ็ๆฑ
|
| 65 |
+
from app.core.proxy_pool import proxy_pool
|
| 66 |
+
proxy_url = setting.grok_config.get("proxy_url", "")
|
| 67 |
+
proxy_pool_url = setting.grok_config.get("proxy_pool_url", "")
|
| 68 |
+
proxy_pool_interval = setting.grok_config.get("proxy_pool_interval", 300)
|
| 69 |
+
proxy_pool.configure(proxy_url, proxy_pool_url, proxy_pool_interval)
|
| 70 |
+
|
| 71 |
+
# 3. ๅผๆญฅๅ ่ฝฝ token ๆฐๆฎ
|
| 72 |
+
await token_manager._load_data()
|
| 73 |
+
logger.info("[Grok2API] Tokenๆฐๆฎๅ ่ฝฝๅฎๆ")
|
| 74 |
+
|
| 75 |
+
# 3.5. ๅ ่ฝฝ API Key ๆฐๆฎ
|
| 76 |
+
from app.services.api_keys import api_key_manager
|
| 77 |
+
await api_key_manager.init()
|
| 78 |
+
logger.info("[Grok2API] API Keyๆฐๆฎๅ ่ฝฝๅฎๆ")
|
| 79 |
+
|
| 80 |
+
# 3.6. ๅ ่ฝฝ็ป่ฎกๅๆฅๅฟๆฐๆฎ
|
| 81 |
+
from app.services.request_stats import request_stats
|
| 82 |
+
from app.services.request_logger import request_logger
|
| 83 |
+
await request_stats.init()
|
| 84 |
+
await request_logger.init()
|
| 85 |
+
logger.info("[Grok2API] ็ป่ฎกๅๆฅๅฟๆฐๆฎๅ ่ฝฝๅฎๆ")
|
| 86 |
+
|
| 87 |
+
# 4. ๅฏๅจๆน้ไฟๅญไปปๅก
|
| 88 |
+
await token_manager.start_batch_save()
|
| 89 |
+
|
| 90 |
+
# 5. ็ฎก็MCPๆๅก็็ๅฝๅจๆ
|
| 91 |
+
mcp_lifespan_context = mcp_app.lifespan(app)
|
| 92 |
+
await mcp_lifespan_context.__aenter__()
|
| 93 |
+
logger.info("[MCP] MCPๆๅกๅๅงๅๅฎๆ")
|
| 94 |
+
|
| 95 |
+
logger.info("[Grok2API] ๅบ็จๅฏๅจๆๅ")
|
| 96 |
+
|
| 97 |
+
try:
|
| 98 |
+
yield
|
| 99 |
+
finally:
|
| 100 |
+
# --- ๅ
ณ้ญ่ฟ็จ ---
|
| 101 |
+
# 1. ้ๅบMCPๆๅก็็ๅฝๅจๆ
|
| 102 |
+
await mcp_lifespan_context.__aexit__(None, None, None)
|
| 103 |
+
logger.info("[MCP] MCPๆๅกๅทฒๅ
ณ้ญ")
|
| 104 |
+
|
| 105 |
+
# 2. ๅ
ณ้ญๆน้ไฟๅญไปปๅกๅนถๅทๆฐๆฐๆฎ
|
| 106 |
+
await token_manager.shutdown()
|
| 107 |
+
logger.info("[Token] Token็ฎก็ๅจๅทฒๅ
ณ้ญ")
|
| 108 |
+
|
| 109 |
+
# 3. ๅ
ณ้ญๆ ธๅฟๆๅก
|
| 110 |
+
await storage_manager.close()
|
| 111 |
+
logger.info("[Grok2API] ๅบ็จๅ
ณ้ญๆๅ")
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
# ๅๅงๅๆฅๅฟ
|
| 115 |
+
logger.info("[Grok2API] ๅบ็จๆญฃๅจๅฏๅจ...")
|
| 116 |
+
logger.info("[Grok2API] Fork ็ๆฌ็ปดๆค: @Tomiya233")
|
| 117 |
+
|
| 118 |
+
# ๅๅปบFastAPIๅบ็จ
|
| 119 |
+
app = FastAPI(
|
| 120 |
+
title="Grok2API",
|
| 121 |
+
description="Grok API ่ฝฌๆขๆๅก",
|
| 122 |
+
version="1.3.1",
|
| 123 |
+
lifespan=lifespan
|
| 124 |
+
)
|
| 125 |
+
|
| 126 |
+
# ๆณจๅๅ
จๅฑๅผๅธธๅค็ๅจ
|
| 127 |
+
register_exception_handlers(app)
|
| 128 |
+
|
| 129 |
+
# ๆณจๅ่ทฏ็ฑ
|
| 130 |
+
app.include_router(chat_router, prefix="/v1")
|
| 131 |
+
app.include_router(models_router, prefix="/v1")
|
| 132 |
+
app.include_router(images_router)
|
| 133 |
+
app.include_router(admin_router)
|
| 134 |
+
|
| 135 |
+
# ๆ่ฝฝ้ๆๆไปถ
|
| 136 |
+
app.mount("/static", StaticFiles(directory="app/template"), name="template")
|
| 137 |
+
|
| 138 |
+
@app.get("/")
|
| 139 |
+
async def root():
|
| 140 |
+
"""ๆ น่ทฏๅพ"""
|
| 141 |
+
from fastapi.responses import RedirectResponse
|
| 142 |
+
return RedirectResponse(url="/login")
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
@app.get("/health")
|
| 146 |
+
async def health_check():
|
| 147 |
+
"""ๅฅๅบทๆฃๆฅๆฅๅฃ"""
|
| 148 |
+
return {
|
| 149 |
+
"status": "healthy",
|
| 150 |
+
"service": "Grok2API",
|
| 151 |
+
"version": "1.0.3"
|
| 152 |
+
}
|
| 153 |
+
|
| 154 |
+
# ๆ่ฝฝMCPๆๅกๅจ
|
| 155 |
+
app.mount("", mcp_app)
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
if __name__ == "__main__":
|
| 159 |
+
import uvicorn
|
| 160 |
+
import os
|
| 161 |
+
|
| 162 |
+
# ่ฏปๅ worker ๆฐ้๏ผ้ป่ฎคไธบ 1
|
| 163 |
+
workers = int(os.getenv("WORKERS", "1"))
|
| 164 |
+
|
| 165 |
+
# ๆ็คบๅค่ฟ็จๆจกๅผ
|
| 166 |
+
if workers > 1:
|
| 167 |
+
logger.info(
|
| 168 |
+
f"[Grok2API] ๅค่ฟ็จๆจกๅผๅทฒๅฏ็จ (workers={workers})ใ"
|
| 169 |
+
f"ๅปบ่ฎฎไฝฟ็จ Redis/MySQL ๅญๅจไปฅ่ทๅพๆไฝณๆง่ฝใ"
|
| 170 |
+
)
|
| 171 |
+
|
| 172 |
+
# ็กฎๅฎไบไปถๅพช็ฏ็ฑปๅ
|
| 173 |
+
loop_type = "auto"
|
| 174 |
+
if workers == 1 and sys.platform != 'win32':
|
| 175 |
+
try:
|
| 176 |
+
import uvloop
|
| 177 |
+
loop_type = "uvloop"
|
| 178 |
+
except ImportError:
|
| 179 |
+
pass
|
| 180 |
+
|
| 181 |
+
uvicorn.run(
|
| 182 |
+
"main:app",
|
| 183 |
+
host="0.0.0.0",
|
| 184 |
+
port=8000,
|
| 185 |
+
workers=workers,
|
| 186 |
+
loop=loop_type
|
| 187 |
+
)
|
pyproject.toml
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[project]
|
| 2 |
+
name = "grok2api"
|
| 3 |
+
version = "1.4.3"
|
| 4 |
+
description = "ๅบไบ FastAPI ้ๆ็ Grok2API๏ผ้้
ๆๆฐ็Web่ฐ็จๆ ผๅผ๏ผๆฏๆๆตๅผๅฏน่ฏใๅพๅ็ๆใๅพๅ็ผ่พใ่็ฝๆ็ดขใ่ง้ข็ๆใๆทฑๅบฆๆ่๏ผๆฏๆๅทๆฑ ๅนถๅ่ฐ็จๅ่ชๅจ่ด่ฝฝๅ่กกใ"
|
| 5 |
+
readme = "README.md"
|
| 6 |
+
requires-python = ">=3.13"
|
| 7 |
+
dependencies = [
|
| 8 |
+
"aiofiles==25.1.0",
|
| 9 |
+
"aiomysql==0.2.0",
|
| 10 |
+
"curl-cffi==0.13.0",
|
| 11 |
+
"fastapi==0.119.0",
|
| 12 |
+
"pydantic==2.12.2",
|
| 13 |
+
"python-dotenv==1.1.1",
|
| 14 |
+
"redis==6.4.0",
|
| 15 |
+
"requests==2.32.5",
|
| 16 |
+
"starlette==0.48.0",
|
| 17 |
+
"toml==0.10.2",
|
| 18 |
+
"uvloop==0.21.0 ; sys_platform != 'win32'",
|
| 19 |
+
"uvicorn==0.37.0",
|
| 20 |
+
"portalocker==3.0.0",
|
| 21 |
+
"fastmcp==2.12.4",
|
| 22 |
+
"cryptography==46.0.3",
|
| 23 |
+
"orjson==3.11.4",
|
| 24 |
+
"aiohttp==3.13.2",
|
| 25 |
+
]
|
readme.md
ADDED
|
@@ -0,0 +1,254 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: Grok2api
|
| 3 |
+
emoji: ๐ฆ
|
| 4 |
+
colorFrom: indigo
|
| 5 |
+
colorTo: red
|
| 6 |
+
sdk: docker
|
| 7 |
+
pinned: false
|
| 8 |
+
---
|
| 9 |
+
# Grok2API
|
| 10 |
+
|
| 11 |
+
ๅบไบ **FastAPI** ้ๆ็ Grok2API๏ผๅ
จ้ข้้
ๆๆฐ Web ่ฐ็จๆ ผๅผ๏ผๆฏๆๆตๅผๅฏน่ฏใๅพๅ็ๆใๅพๅ็ผ่พใ่็ฝๆ็ดขใๆทฑๅบฆๆ่๏ผๅทๆฑ ๅนถๅไธ่ชๅจ่ด่ฝฝๅ่กกไธไฝๅใ
|
| 12 |
+
|
| 13 |
+
## ๐ Fork ๅขๅผบๅ่ฝ
|
| 14 |
+
|
| 15 |
+
ๆฌ Fork ๅจๅ็ๅบ็กไธๆฐๅขไปฅไธๅ่ฝ๏ผ
|
| 16 |
+
|
| 17 |
+
- **ๅค Key ็ฎก็ไธๆไน
ๅ**๏ผๆฏๆ็ฎก็ๅๆน้ๅๅปบใๅคๆณจใๅ ้ค API Key๏ผๆฏๆๅค้ๆน้ๆไฝใๆๆๅฏ้ฅๅๅฎ็ฐๆไน
ๅๅญๅจ๏ผ้ๅฏไธไธขๅคฑใ
|
| 18 |
+
- **ๆฅๅฟๅฎก่ฎกๆฏๆ**๏ผๅฎๆถ่ฎฐๅฝ่ฏทๆฑ็ป่๏ผไธๆฅๅฟๆฐๆฎๆฏๆๆไปถๆไน
ๅๅญๅจใ
|
| 19 |
+
- **ๅนถๅๆง่ฝไผๅ (Critical)**๏ผ้ๆไบๅบๅฑ็ Grok ่ฏทๆฑๅๅๅบๅค็้ป่พใ้็จๅ
จๅผๆญฅๆตๅผ่ฟญไปฃ (`aiter_lines`)๏ผๅฝปๅบ่งฃๅณไบๅจๆถๆฏ็ๆ่ฟ็จไธญๅๅฐ็ฎก็้ขๆฟโๅกๆญปโๆๅๅบ็ผๆ
ข็้ฎ้ขใ
|
| 20 |
+
- **Token ๆบ่ฝๅทๅด**๏ผ่ฏทๆฑๅคฑ่ดฅๅ่ชๅจๅทๅด๏ผ้ฟๅ
่ฟ็ปญไฝฟ็จๆ
้ Token
|
| 21 |
+
- ๆฎ้้่ฏฏ๏ผๅทๅด 5 ๆฌก่ฏทๆฑ
|
| 22 |
+
- 429 ้ๆต + ๆ้ขๅบฆ๏ผๅทๅด 1 ๅฐๆถ
|
| 23 |
+
- 429 ้ๆต + ๆ ้ขๅบฆ๏ผๅทๅด 10 ๅฐๆถ
|
| 24 |
+
- **ไธ้ฎๅทๆฐๆๆ Token**๏ผๅๅฐๆ้ฎๆน้ๅทๆฐๅฉไฝๆฌกๆฐ๏ผๅธฆๅฎๆถ่ฟๅบฆๆพ็คบ
|
| 25 |
+
- **ๅนถๅไฟๆค**๏ผๅทๆฐไปปๅก่ฟ่กไธญ่ชๅจๆ็ป้ๅค่ฏทๆฑ
|
| 26 |
+
- **่ฏทๆฑ็ป่ฎกไธๆไน
ๅ**๏ผๆๅฐๆถ/ๅคฉ็ป่ฎก่ฏทๆฑ่ถๅฟ๏ผๅ
ๅซๆๅ็ๅๆจกๅๅๅธๅพ่กจ๏ผ็ป่ฎกๆฐๆฎๆฏๆๆไน
ๅใ
|
| 27 |
+
- **็ผๅญ้ข่ง**๏ผๅๅฐๆทปๅ ็ผๅญ้ข่งๆฟๅ๏ผๅฏๆฅ็็ผๅญ็ๅพ็/่ง้ข
|
| 28 |
+
<br>
|
| 29 |
+
|
| 30 |
+
## ไฝฟ็จ่ฏดๆ
|
| 31 |
+
|
| 32 |
+
### ่ฐ็จๆฌกๆฐไธ้
้ข
|
| 33 |
+
|
| 34 |
+
- **ๆฎ้่ดฆๅท๏ผBasic๏ผ**๏ผๅ
่ดนไฝฟ็จ **80 ๆฌก / 20 ๅฐๆถ**
|
| 35 |
+
- **Super ่ดฆๅท**๏ผ้
้ขๅพ
ๅฎ๏ผไฝ่
ๆชๆต๏ผ
|
| 36 |
+
- ็ณป็ป่ชๅจ่ด่ฝฝๅ่กกๅ่ดฆๅท่ฐ็จๆฌกๆฐ๏ผๅฏๅจ**็ฎก็้กต้ข**ๅฎๆถๆฅ็็จ้ไธ็ถๆ
|
| 37 |
+
|
| 38 |
+
### ๅพๅ็ๆๅ่ฝ
|
| 39 |
+
|
| 40 |
+
- ๅจๅฏน่ฏๅ
ๅฎนไธญ่พๅ
ฅๅฆโ็ปๆ็ปไธไธชๆไบฎโ่ชๅจ่งฆๅๅพ็็ๆ
|
| 41 |
+
- ๆฏๆฌกไปฅ **Markdown ๆ ผๅผ่ฟๅไธคๅผ ๅพ็**๏ผๅ
ฑๆถ่ 4 ๆฌก้ขๅบฆ
|
| 42 |
+
- **ๆณจๆ๏ผGrok ็ๅพ็็ด้พๅ 403 ้ๅถ๏ผ็ณป็ป่ชๅจ็ผๅญๅพ็ๅฐๆฌๅฐใๅฟ
้กปๆญฃ็กฎ่ฎพ็ฝฎ `Base Url` ไปฅ็กฎไฟๅพ็่ฝๆญฃๅธธๆพ็คบ๏ผ**
|
| 43 |
+
|
| 44 |
+
### ่ง้ข็ๆๅ่ฝ
|
| 45 |
+
- ้ๆฉ `grok-imagine-0.9` ๆจกๅ๏ผไผ ๅ
ฅๅพ็ๅๆ็คบ่ฏๅณๅฏ๏ผๆนๅผๅ OpenAI ็ๅพ็ๅๆ่ฐ็จๆ ผๅผไธ่ด๏ผ
|
| 46 |
+
- ่ฟๅๆ ผๅผไธบ `<video src="{full_video_url}" controls="controls"></video>`
|
| 47 |
+
- **ๆณจๆ๏ผGrok ็่ง้ข็ด้พๅ 403 ้ๅถ๏ผ็ณป็ป่ชๅจ็ผๅญๅพ็ๅฐๆฌๅฐใๅฟ
้กปๆญฃ็กฎ่ฎพ็ฝฎ `Base Url` ไปฅ็กฎไฟ่ง้ข่ฝๆญฃๅธธๆพ็คบ๏ผ**
|
| 48 |
+
|
| 49 |
+
```
|
| 50 |
+
curl https://ไฝ ็ๆๅกๅจๅฐๅ/v1/chat/completions \
|
| 51 |
+
-H "Content-Type: application/json" \
|
| 52 |
+
-H "Authorization: Bearer $GROK2API_API_KEY" \
|
| 53 |
+
-d '{
|
| 54 |
+
"model": "grok-imagine-0.9",
|
| 55 |
+
"messages": [
|
| 56 |
+
{
|
| 57 |
+
"role": "user",
|
| 58 |
+
"content": [
|
| 59 |
+
{
|
| 60 |
+
"type": "text",
|
| 61 |
+
"text": "่ฎฉๅคช้ณๅ่ตทๆฅ"
|
| 62 |
+
},
|
| 63 |
+
{
|
| 64 |
+
"type": "image_url",
|
| 65 |
+
"image_url": {
|
| 66 |
+
"url": "https://your-image.jpg"
|
| 67 |
+
}
|
| 68 |
+
}
|
| 69 |
+
]
|
| 70 |
+
}
|
| 71 |
+
]
|
| 72 |
+
}'
|
| 73 |
+
```
|
| 74 |
+
|
| 75 |
+
### ๅ
ณไบ `x_statsig_id`
|
| 76 |
+
|
| 77 |
+
- `x_statsig_id` ๆฏ Grok ็จไบๅๆบๅจไบบ็ Token๏ผๆ้ๅ่ตๆๅฏๅ่
|
| 78 |
+
- **ๅปบ่ฎฎๆฐๆๅฟไฟฎๆน้
็ฝฎ๏ผไฟ็้ป่ฎคๅผๅณๅฏ**
|
| 79 |
+
- ๅฐ่ฏ็จ Camoufox ็ป่ฟ 403 ่ชๅจ่ท id๏ผไฝ grok ็ฐๅทฒ้ๅถ้็ป้็`x_statsig_id`๏ผๆ
ๅผ็จ๏ผ้็จๅบๅฎๅผไปฅๅ
ผๅฎนๆๆ่ฏทๆฑ
|
| 80 |
+
|
| 81 |
+
<br>
|
| 82 |
+
|
| 83 |
+
## ๅฆไฝ้จ็ฝฒ
|
| 84 |
+
|
| 85 |
+
### ๆนๅผไธ๏ผDocker Compose๏ผๆจ่๏ผ
|
| 86 |
+
|
| 87 |
+
็ฑไบๆฌ้กน็ฎๅ
ๅซไฟฎๆน๏ผๅปบ่ฎฎ็ดๆฅๆๅปบ่ฟ่ก๏ผ
|
| 88 |
+
|
| 89 |
+
1. ๅ
้ๆฌไปๅบ
|
| 90 |
+
```bash
|
| 91 |
+
git clone https://github.com/Tomiya233/grok2api.git
|
| 92 |
+
cd grok2api
|
| 93 |
+
```
|
| 94 |
+
|
| 95 |
+
2. ๅฏๅจๆๅก
|
| 96 |
+
```bash
|
| 97 |
+
docker-compose up -d --build
|
| 98 |
+
```
|
| 99 |
+
|
| 100 |
+
**docker-compose.yml ๅ่๏ผ**
|
| 101 |
+
```yaml
|
| 102 |
+
services:
|
| 103 |
+
grok2api:
|
| 104 |
+
build: .
|
| 105 |
+
image: grok2api:latest
|
| 106 |
+
container_name: grok2api
|
| 107 |
+
restart: always
|
| 108 |
+
ports:
|
| 109 |
+
- "8000:8000"
|
| 110 |
+
volumes:
|
| 111 |
+
- grok_data:/app/data
|
| 112 |
+
- ./logs:/app/logs
|
| 113 |
+
environment:
|
| 114 |
+
- LOG_LEVEL=INFO
|
| 115 |
+
logging:
|
| 116 |
+
driver: "json-file"
|
| 117 |
+
options:
|
| 118 |
+
max-size: "10m"
|
| 119 |
+
max-file: "3"
|
| 120 |
+
|
| 121 |
+
volumes:
|
| 122 |
+
grok_data:
|
| 123 |
+
```
|
| 124 |
+
|
| 125 |
+
### ๆนๅผไบ๏ผPython ็ดๆฅ่ฟ่ก
|
| 126 |
+
|
| 127 |
+
**ๅ็ฝฎ่ฆๆฑ**๏ผPython 3.10+๏ผๅปบ่ฎฎไฝฟ็จ `uv` ๅ
็ฎก็ๅจ
|
| 128 |
+
|
| 129 |
+
1. ๅฎ่ฃ
uv
|
| 130 |
+
```bash
|
| 131 |
+
curl -LsSf https://astral.sh/uv/install.sh | sh
|
| 132 |
+
```
|
| 133 |
+
|
| 134 |
+
2. ่ฟ่กๆๅก
|
| 135 |
+
```bash
|
| 136 |
+
# ๅฎ่ฃ
ไพ่ตๅนถ่ฟ่ก
|
| 137 |
+
uv sync
|
| 138 |
+
uv run python main.py
|
| 139 |
+
```
|
| 140 |
+
|
| 141 |
+
ๆๅก้ป่ฎค่ฟ่กๅจ `http://127.0.0.1:8000`
|
| 142 |
+
|
| 143 |
+
### ็ฏๅขๅ้่ฏดๆ
|
| 144 |
+
|
| 145 |
+
| ็ฏๅขๅ้ | ๅฟ
ๅกซ | ่ฏดๆ | ็คบไพ |
|
| 146 |
+
|---------------|------|-----------------------------------------|------|
|
| 147 |
+
| STORAGE_MODE | ๅฆ | ๅญๅจๆจกๅผ๏ผfile/mysql/redis | file |
|
| 148 |
+
| DATABASE_URL | ๅฆ | ๆฐๆฎๅบ่ฟๆฅURL๏ผMySQL/Redisๆจกๅผๆถๅฟ
้๏ผ | mysql://user:pass@host:3306/db |
|
| 149 |
+
|
| 150 |
+
**ๅญๅจๆจกๅผ๏ผ**
|
| 151 |
+
- `file`: ๆฌๅฐๆไปถๅญๅจ๏ผ้ป่ฎค๏ผ
|
| 152 |
+
- `mysql`: MySQLๆฐๆฎๅบๅญๅจ๏ผ้่ฎพ็ฝฎDATABASE_URL
|
| 153 |
+
- `redis`: Redis็ผๅญๅญๅจ๏ผ้่ฎพ็ฝฎDATABASE_URL
|
| 154 |
+
|
| 155 |
+
<br>
|
| 156 |
+
|
| 157 |
+
## ๆฅๅฃ่ฏดๆ
|
| 158 |
+
|
| 159 |
+
> ไธ OpenAI ๅฎๆนๆฅๅฃๅฎๅ
จๅ
ผๅฎน๏ฟฝ๏ฟฝ๏ฟฝAPI ่ฏทๆฑ้้่ฟ **Authorization header** ่ฎค่ฏ
|
| 160 |
+
|
| 161 |
+
| ๆนๆณ | ็ซฏ็น | ๆ่ฟฐ | ๆฏๅฆ้่ฆ่ฎค่ฏ |
|
| 162 |
+
|-------|------------------------------|------------------------------------|------|
|
| 163 |
+
| POST | `/v1/chat/completions` | ๅๅปบ่ๅคฉๅฏน่ฏ๏ผๆตๅผ/้ๆตๅผ๏ผ | โ
|
|
| 164 |
+
| GET | `/v1/models` | ่ทๅๅ
จ้จๆฏๆๆจกๅ | โ
|
|
| 165 |
+
| GET | `/images/{img_path}` | ่ทๅ็ๆๅพ็ๆไปถ | โ |
|
| 166 |
+
|
| 167 |
+
<br>
|
| 168 |
+
|
| 169 |
+
<details>
|
| 170 |
+
<summary>็ฎก็ไธ็ป่ฎกๆฅๅฃ๏ผๅฑๅผๆฅ็ๆดๅค๏ผ</summary>
|
| 171 |
+
|
| 172 |
+
| ๆนๆณ | ็ซฏ็น | ๆ่ฟฐ | ่ฎค่ฏ |
|
| 173 |
+
|-------|-------------------------|--------------------|------|
|
| 174 |
+
| GET | /login | ็ฎก็ๅ็ปๅฝ้กต้ข | โ |
|
| 175 |
+
| GET | /manage | ็ฎก็ๆงๅถๅฐ้กต้ข | โ |
|
| 176 |
+
| POST | /api/login | ็ฎก็ๅ็ปๅฝ่ฎค่ฏ | โ |
|
| 177 |
+
| POST | /api/logout | ็ฎก็ๅ็ปๅบ | โ
|
|
| 178 |
+
| GET | /api/tokens | ่ทๅ Token ๅ่กจ | โ
|
|
| 179 |
+
| POST | /api/tokens/add | ๆน้ๆทปๅ Token | โ
|
|
| 180 |
+
| POST | /api/tokens/delete | ๆน้ๅ ้ค Token | โ
|
|
| 181 |
+
| GET | /api/settings | ่ทๅ็ณป็ป้
็ฝฎ | โ
|
|
| 182 |
+
| POST | /api/settings | ๆดๆฐ็ณป็ป้
็ฝฎ | โ
|
|
| 183 |
+
| GET | /api/cache/size | ่ทๅ็ผๅญๅคงๅฐ | โ
|
|
| 184 |
+
| POST | /api/cache/clear | ๆธ
็ๆๆ็ผๅญ | โ
|
|
| 185 |
+
| POST | /api/cache/clear/images | ๆธ
็ๅพ็็ผๅญ | โ
|
|
| 186 |
+
| POST | /api/cache/clear/videos | ๆธ
็่ง้ข็ผๅญ | โ
|
|
| 187 |
+
| GET | /api/stats | ่ทๅ็ป่ฎกไฟกๆฏ | โ
|
|
| 188 |
+
| POST | /api/tokens/tags | ๆดๆฐ Token ๆ ็ญพ | โ
|
|
| 189 |
+
| POST | /api/tokens/note | ๆดๆฐ Token ๅคๆณจ | โ
|
|
| 190 |
+
| POST | /api/tokens/test | ๆต่ฏ Token ๅฏ็จๆง | โ
|
|
| 191 |
+
| GET | /api/tokens/tags/all | ่ทๅๆๆๆ ็ญพๅ่กจ | โ
|
|
| 192 |
+
| GET | /api/storage/mode | ่ทๅๅญๅจๆจกๅผไฟกๆฏ | โ
|
|
| 193 |
+
| POST | /api/tokens/refresh-all | ไธ้ฎๅทๆฐๆๆToken | โ
|
|
| 194 |
+
| GET | /api/tokens/refresh-progress | ่ทๅๅทๆฐ่ฟๅบฆ | โ
|
|
| 195 |
+
| GET | /api/keys | ่ทๅ API Key ๅ่กจ | โ
|
|
| 196 |
+
| POST | /api/keys/add | ๅๅปบๆฐ API Key | โ
|
|
| 197 |
+
| POST | /api/keys/delete | ๅ ้ค API Key | โ
|
|
| 198 |
+
| POST | /api/keys/status | ๅๆข Key ๅฏ็จ็ถๆ | โ
|
|
| 199 |
+
| POST | /api/keys/name | ไฟฎๆน Key ๅคๆณจๅ็งฐ | โ
|
|
| 200 |
+
| GET | /api/logs | ่ทๅ่ฏทๆฑๆฅๅฟ(1000ๆก)| โ
|
|
| 201 |
+
| POST | /api/logs/clear | ๆธ
็ฉบๆๆๅฎก่ฎกๆฅๅฟ | โ
|
|
| 202 |
+
|
| 203 |
+
</details>
|
| 204 |
+
|
| 205 |
+
<br>
|
| 206 |
+
|
| 207 |
+
## ๅฏ็จๆจกๅไธ่ง
|
| 208 |
+
|
| 209 |
+
| ๆจกๅๅ็งฐ | ่ฎกๆฌก | ่ดฆๆท็ฑปๅ | ๅพๅ็ๆ/็ผ่พ | ๆทฑๅบฆๆ่ | ่็ฝๆ็ดข | ่ง้ข็ๆ |
|
| 210 |
+
|------------------------|--------|--------------|--------------|----------|----------|----------|
|
| 211 |
+
| `grok-4.1` | 1 | Basic/Super | โ
| โ
| โ
| โ |
|
| 212 |
+
| `grok-4.1-thinking` | 1 | Basic/Super | โ
| โ
| โ
| โ |
|
| 213 |
+
| `grok-imagine-0.9` | - | Basic/Super | โ
| โ | โ | โ
|
|
| 214 |
+
| `grok-4-fast` | 1 | Basic/Super | โ
| โ
| โ
| โ |
|
| 215 |
+
| `grok-4-fast-expert` | 4 | Basic/Super | โ
| โ
| โ
| โ |
|
| 216 |
+
| `grok-4-expert` | 4 | Basic/Super | โ
| โ
| โ
| โ |
|
| 217 |
+
| `grok-4-heavy` | 1 | Super | โ
| โ
| โ
| โ |
|
| 218 |
+
| `grok-3-fast` | 1 | Basic/Super | โ
| โ | โ
| โ |
|
| 219 |
+
|
| 220 |
+
<br>
|
| 221 |
+
|
| 222 |
+
## ้
็ฝฎๅๆฐ่ฏดๆ
|
| 223 |
+
|
| 224 |
+
> ๆๅกๅฏๅจๅ๏ผ็ปๅฝ `/login` ็ฎก็ๅๅฐ่ฟ่กๅๆฐ้
็ฝฎ
|
| 225 |
+
|
| 226 |
+
| ๅๆฐๅ | ไฝ็จๅ | ๅฟ
ๅกซ | ่ฏดๆ | ้ป่ฎคๅผ |
|
| 227 |
+
|----------------------------|---------|------|-----------------------------------------|--------|
|
| 228 |
+
| admin_username | global | ๅฆ | ็ฎก็ๅๅฐ็ปๅฝ็จๆทๅ | "admin"|
|
| 229 |
+
| admin_password | global | ๅฆ | ็ฎก็ๅๅฐ็ปๅฝๅฏ็ | "admin"|
|
| 230 |
+
| log_level | global | ๅฆ | ๆฅๅฟ็บงๅซ๏ผDEBUG/INFO/... | "INFO" |
|
| 231 |
+
| image_mode | global | ๅฆ | ๅพ็่ฟๅๆจกๅผ๏ผurl/base64 | "url" |
|
| 232 |
+
| image_cache_max_size_mb | global | ๅฆ | ๅพ็็ผๅญๆๅคงๅฎน้(MB) | 512 |
|
| 233 |
+
| video_cache_max_size_mb | global | ๅฆ | ่ง้ข็ผๅญๆๅคงๅฎน้(MB) | 1024 |
|
| 234 |
+
| base_url | global | ๅฆ | ๆๅกๅบ็กURL/ๅพ็่ฎฟ้ฎๅบๅ | "" |
|
| 235 |
+
| api_key | grok | ๅฆ | API ๅฏ้ฅ๏ผๅฏ้ๅ ๅผบๅฎๅ
จ๏ผ | "" |
|
| 236 |
+
| proxy_url | grok | ๅฆ | HTTPไปฃ็ๆๅกๅจๅฐๅ | "" |
|
| 237 |
+
| stream_chunk_timeout | grok | ๅฆ | ๆตๅผๅๅ่ถ
ๆถๆถ้ด(็ง) | 120 |
|
| 238 |
+
| stream_first_response_timeout | grok | ๅฆ | ๆตๅผ้ฆๆฌกๅๅบ่ถ
ๆถๆถ้ด(็ง) | 30 |
|
| 239 |
+
| stream_total_timeout | grok | ๅฆ | ๆตๅผๆป่ถ
ๆถๆถ้ด(็ง) | 600 |
|
| 240 |
+
| cf_clearance | grok | ๅฆ | Cloudflareๅฎๅ
จไปค็ | "" |
|
| 241 |
+
| x_statsig_id | grok | ๆฏ | ๅๆบๅจไบบๅฏไธๆ ่ฏ็ฌฆ | "ZTpUeXBlRXJyb3I6IENhbm5vdCByZWFkIHByb3BlcnRpZXMgb2YgdW5kZWZpbmVkIChyZWFkaW5nICdjaGlsZE5vZGVzJyk=" |
|
| 242 |
+
| filtered_tags | grok | ๅฆ | ่ฟๆปคๅๅบๆ ็ญพ๏ผ้ๅทๅ้๏ผ | "xaiartifact,xai:tool_usage_card,grok:render" |
|
| 243 |
+
| show_thinking | grok | ๅฆ | ๆพ็คบๆ่่ฟ็จ true(ๆพ็คบ)/false(้่) | true |
|
| 244 |
+
| temporary | grok | ๅฆ | ไผ่ฏๆจกๅผ true(ไธดๆถ)/false | true |
|
| 245 |
+
|
| 246 |
+
<br>
|
| 247 |
+
|
| 248 |
+
## โ ๏ธ ๆณจๆไบ้กน
|
| 249 |
+
|
| 250 |
+
ๆฌ้กน็ฎไป
ไพๅญฆไน ไธ็ ็ฉถ๏ผ่ฏท้ตๅฎ็ธๅ
ณไฝฟ็จๆกๆฌพ๏ผ
|
| 251 |
+
|
| 252 |
+
<br>
|
| 253 |
+
|
| 254 |
+
> ๆฌ้กน็ฎๅบไบไปฅไธ้กน็ฎๅญฆไน ้ๆ๏ผ็นๅซๆ่ฐข๏ผ[LINUX DO](https://linux.do)ใ[VeroFess/grok2api](https://github.com/VeroFess/grok2api)ใ[xLmiler/grok2api_python](https://github.com/xLmiler/grok2api_python)
|
requirements.txt
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
toml==0.10.2
|
| 2 |
+
fastapi==0.119.0
|
| 3 |
+
uvicorn==0.37.0
|
| 4 |
+
uvloop==0.21.0; sys_platform != 'win32'
|
| 5 |
+
python-dotenv==1.1.1
|
| 6 |
+
curl_cffi==0.13.0
|
| 7 |
+
requests==2.32.5
|
| 8 |
+
starlette==0.48.0
|
| 9 |
+
pydantic==2.12.2
|
| 10 |
+
aiofiles==25.1.0
|
| 11 |
+
portalocker==3.0.0
|
| 12 |
+
aiomysql==0.2.0
|
| 13 |
+
redis==6.4.0
|
| 14 |
+
fastmcp==2.12.4
|
| 15 |
+
cryptography==46.0.3
|
| 16 |
+
orjson==3.11.4
|
| 17 |
+
aiohttp==3.13.2
|
test/test_concurrency.py
ADDED
|
@@ -0,0 +1,276 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
"""
|
| 3 |
+
Grok2API ๅนถๅๆง่ฝๆต่ฏ่ๆฌ
|
| 4 |
+
|
| 5 |
+
ๆต่ฏไธๅๅนถๅ็บงๅซไธ็APIๆง่ฝ่กจ็ฐ
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import asyncio
|
| 9 |
+
import aiohttp
|
| 10 |
+
import time
|
| 11 |
+
import statistics
|
| 12 |
+
import argparse
|
| 13 |
+
from datetime import datetime
|
| 14 |
+
from typing import List, Dict, Any
|
| 15 |
+
import json
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class ConcurrencyTester:
    """Concurrency benchmark for an OpenAI-compatible /v1/chat/completions API.

    Sends a configurable number of chat requests with a bounded number in
    flight, records one result dict per request in ``self.results`` and
    prints aggregate latency/throughput statistics.
    """

    def __init__(self, base_url: str, api_key: str = None):
        # Strip any trailing slash so URL joining below never doubles it.
        self.base_url = base_url.rstrip('/')
        self.api_key = api_key
        # One record per finished request; see test_request() for the schema.
        self.results: List[Dict[str, Any]] = []

    async def test_request(self, session: aiohttp.ClientSession, request_id: int) -> Dict[str, Any]:
        """Send one chat-completion request and return a timing/result record.

        Never raises: timeouts and transport errors are folded into the
        returned dict (``status`` / ``error``) so a gather() keeps running.
        """
        url = f"{self.base_url}/v1/chat/completions"

        headers = {
            "Content-Type": "application/json"
        }
        if self.api_key:
            headers["Authorization"] = f"Bearer {self.api_key}"

        payload = {
            "model": "grok-3-fast",
            "messages": [
                {"role": "user", "content": f"ๆต่ฏ่ฏทๆฑ #{request_id}๏ผ่ฏท็ฎ็ญๅๅคOK"}
            ],
            "stream": False,
            "max_tokens": 10,
        }

        start_time = time.time()

        try:
            # FIX: pass an explicit ClientTimeout; a bare int for `timeout`
            # is deprecated in aiohttp.
            async with session.post(
                url,
                json=payload,
                headers=headers,
                timeout=aiohttp.ClientTimeout(total=30),
            ) as response:
                status = response.status

                if status == 200:
                    data = await response.json()
                    return {
                        "id": request_id,
                        "status": "success",
                        "http_status": status,
                        "elapsed": time.time() - start_time,
                        "response_length": len(json.dumps(data)),
                    }

                error_text = await response.text()
                return {
                    "id": request_id,
                    "status": "error",
                    "http_status": status,
                    "elapsed": time.time() - start_time,
                    "error": error_text[:200],  # keep error payloads short
                }

        except asyncio.TimeoutError:
            return {
                "id": request_id,
                "status": "timeout",
                "elapsed": time.time() - start_time,
                "error": "Request timeout",
            }
        except Exception as e:  # broad on purpose: a benchmark must not abort
            return {
                "id": request_id,
                "status": "exception",
                "elapsed": time.time() - start_time,
                "error": str(e),
            }

    async def run_concurrent_test(self, concurrency: int, total_requests: int):
        """Run ``total_requests`` requests keeping at most ``concurrency`` in flight."""
        print(f"\n{'='*60}")
        print(f"๐ ๆต่ฏ้็ฝฎ๏ผๅนถๅๆฐ {concurrency}, ๆป่ฏทๆฑๆฐ {total_requests}")
        print(f"{'='*60}")

        connector = aiohttp.TCPConnector(limit=concurrency, limit_per_host=concurrency)
        timeout = aiohttp.ClientTimeout(total=60)

        async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session:
            # Warm-up request (id 0) is deliberately not recorded.
            print("๐ฅ ้ข็ญไธญ...")
            await self.test_request(session, 0)

            print(f"๐ ๅผๅงๅนถๅๆต่ฏ...")
            start_time = time.time()

            # FIX: a semaphore keeps exactly `concurrency` requests in flight.
            # The original batch-and-gather scheme stalled every batch on its
            # slowest request, under-reporting achievable throughput.
            semaphore = asyncio.Semaphore(concurrency)
            completed = 0

            async def bounded_request(i: int) -> Dict[str, Any]:
                nonlocal completed
                async with semaphore:
                    result = await self.test_request(session, i)
                completed += 1
                print(f"  ่ฟๅบฆ: {completed}/{total_requests} ({completed/total_requests*100:.1f}%)", end='\r')
                return result

            self.results.extend(
                await asyncio.gather(
                    *(bounded_request(i) for i in range(1, total_requests + 1))
                )
            )

            total_time = time.time() - start_time

        # Aggregate and report once the session is closed.
        self.print_statistics(concurrency, total_requests, total_time)

    @staticmethod
    def _percentile(sorted_latencies: List[float], fraction: float) -> float:
        """Nearest-rank percentile of an ascending-sorted, non-empty list."""
        # Clamp so rounding (or fraction == 1.0) can never index past the end.
        idx = min(int(len(sorted_latencies) * fraction), len(sorted_latencies) - 1)
        return sorted_latencies[idx]

    def print_statistics(self, concurrency: int, total_requests: int, total_time: float):
        """Aggregate ``self.results`` and print success rate, throughput and latency."""
        success_results = [r for r in self.results if r["status"] == "success"]
        error_results = [r for r in self.results if r["status"] != "success"]

        success_count = len(success_results)
        error_count = len(error_results)

        if success_results:
            # FIX: sort once and reuse for every percentile (the original
            # re-sorted the latency list separately for P95 and P99).
            latencies = sorted(r["elapsed"] for r in success_results)
            avg_latency = statistics.mean(latencies)
            min_latency = latencies[0]
            max_latency = latencies[-1]
            p50_latency = statistics.median(latencies)
            p95_latency = self._percentile(latencies, 0.95)
            p99_latency = self._percentile(latencies, 0.99)
        else:
            avg_latency = min_latency = max_latency = p50_latency = p95_latency = p99_latency = 0

        throughput = total_requests / total_time if total_time > 0 else 0

        print(f"\n\n{'='*60}")
        print(f"๐ ๆต่ฏ็ปๆ็ป่ฎก")
        print(f"{'='*60}")
        print(f"  ๆต่ฏๆถ้ด: {total_time:.2f}s")
        print(f"  ๆป่ฏทๆฑๆฐ: {total_requests}")
        print(f"  ๅนถๅๆฐ: {concurrency}")
        print()
        print(f"  ๆๅ่ฏทๆฑ: {success_count} ({success_count/total_requests*100:.1f}%)")
        print(f"  ๅคฑ่ดฅ่ฏทๆฑ: {error_count} ({error_count/total_requests*100:.1f}%)")
        print()
        print(f"  ๅๅ้: {throughput:.2f} req/s")
        print()
        print(f"  ๅปถ่ฟ็ป่ฎก:")
        print(f"    ๆๅฐ: {min_latency*1000:.0f}ms")
        print(f"    ๅนณๅ: {avg_latency*1000:.0f}ms")
        print(f"    ๆๅคง: {max_latency*1000:.0f}ms")
        print(f"    P50: {p50_latency*1000:.0f}ms")
        print(f"    P95: {p95_latency*1000:.0f}ms")
        print(f"    P99: {p99_latency*1000:.0f}ms")

        if error_results:
            print(f"\n  โ ๏ธ ้่ฏฏ่ฏฆๆ:")
            # Tally failures by their status kind (error/timeout/exception).
            error_types: Dict[str, int] = {}
            for r in error_results:
                error_type = r.get("status", "unknown")
                error_types[error_type] = error_types.get(error_type, 0) + 1

            for error_type, count in error_types.items():
                print(f"    {error_type}: {count}")

        print(f"{'='*60}\n")

        self.print_performance_rating(throughput, avg_latency)

    def print_performance_rating(self, throughput: float, avg_latency: float):
        """Print a coarse star rating for throughput and mean latency."""
        print(f"๐ฏ ๆง่ฝ่ฏ็บง:")

        # Throughput rating (req/s thresholds).
        if throughput >= 100:
            rating = "โญโญโญโญโญ ไผ็ง"
        elif throughput >= 60:
            rating = "โญโญโญโญ ่ฏๅฅฝ"
        elif throughput >= 30:
            rating = "โญโญโญ ไธญ็ญ"
        elif throughput >= 10:
            rating = "โญโญ ่พไฝ"
        else:
            rating = "โญ ้ไผๅ"

        print(f"  ๅๅ้ ({throughput:.1f} req/s): {rating}")

        # Latency rating (seconds thresholds).
        if avg_latency < 0.5:
            rating = "โญโญโญโญโญ ไผ็ง"
        elif avg_latency < 1.0:
            rating = "โญโญโญโญ ่ฏๅฅฝ"
        elif avg_latency < 2.0:
            rating = "โญโญโญ ไธญ็ญ"
        elif avg_latency < 5.0:
            rating = "โญโญ ่พ้ซ"
        else:
            rating = "โญ ้ไผๅ"

        print(f"  ๅนณๅๅปถ่ฟ ({avg_latency*1000:.0f}ms): {rating}")
        print()
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
async def main():
    """Parse CLI options and drive one or several concurrency benchmarks."""
    parser = argparse.ArgumentParser(description='Grok2API ๅนถๅๆง่ฝๆต่ฏ')
    parser.add_argument('--url', default='http://localhost:8000', help='API ๅบ็กURL')
    parser.add_argument('--key', default='', help='API Key๏ผๅฏ้๏ผ')
    parser.add_argument('-c', '--concurrency', type=int, default=10, help='ๅนถๅๆฐ')
    parser.add_argument('-n', '--requests', type=int, default=50, help='ๆป่ฏทๆฑๆฐ')
    parser.add_argument('--multi-test', action='store_true', help='่ฟ่กๅค็บงๅนถๅๆต่ฏ')

    args = parser.parse_args()

    print(f"""
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โ          Grok2API ๅนถๅๆง่ฝๆต่ฏๅทฅๅท          โ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ

๐ ๆต่ฏ็ฎๆ : {args.url}
๐ API Key: {'ๅทฒ่ฎพ็ฝฎ' if args.key else 'ๆช่ฎพ็ฝฎ'}
โฐ ๅผๅงๆถ้ด: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}
""")

    tester = ConcurrencyTester(args.url, args.key)

    if args.multi_test:
        # Escalating (concurrency, request-count) stages for the multi-level run.
        for stage_concurrency, stage_requests in ((5, 20), (10, 50), (20, 100), (50, 200)):
            tester.results = []  # discard the previous stage's records
            await tester.run_concurrent_test(stage_concurrency, stage_requests)
            await asyncio.sleep(2)  # brief pause between stages
    else:
        # Single run with the user-supplied parameters.
        await tester.run_concurrent_test(args.concurrency, args.requests)

    print(f"\nโ ๆต่ฏๅฎๆ๏ผ")
    print(f"โฐ ็ปๆๆถ้ด: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
if __name__ == "__main__":
    # Script entry point: asyncio.run creates the event loop, runs main()
    # to completion and closes the loop.
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        # Ctrl-C during a run: report the interruption instead of a traceback.
        print("\n\nโ ๏ธ ๆต่ฏ่ขซ็จๆทไธญๆญ")
    except Exception as e:
        # Last-resort guard so any setup failure prints a one-line summary.
        print(f"\n\nโ ๆต่ฏๅคฑ่ดฅ: {e}")
|
test/test_concurrency.sh
ADDED
|
@@ -0,0 +1,177 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash

# Grok2API ๅนถๅๆต่ฏ่ๆฌ๏ผShell็ๆฌ๏ผ
# ไฝฟ็จ curl ๅ GNU parallel ่ฟ่กๅนถๅๆต่ฏ

set -e

# Configuration (all overridable via environment variables).
BASE_URL="${BASE_URL:-http://localhost:8000}"
API_KEY="${API_KEY:-}"
CONCURRENCY="${CONCURRENCY:-10}"
TOTAL_REQUESTS="${TOTAL_REQUESTS:-50}"

# ANSI colours for the report.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

echo -e "${BLUE}โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ${NC}"
echo -e "${BLUE}โ          Grok2API ๅนถๅๆง่ฝๆต่ฏๅทฅๅท (Shell็)          โ${NC}"
echo -e "${BLUE}โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ${NC}"
echo ""
echo -e "${GREEN}๐ ๆต่ฏ็ฎๆ :${NC} $BASE_URL"
echo -e "${GREEN}๐ API Key:${NC} ${API_KEY:-(ๆช่ฎพ็ฝฎ)}"
echo -e "${GREEN}๐ ๅนถๅๆฐ:${NC} $CONCURRENCY"
echo -e "${GREEN}๐ ๆป่ฏทๆฑๆฐ:${NC} $TOTAL_REQUESTS"
echo ""

# FIX: check every external dependency up front. The original only checked
# curl, but bc is required for all floating-point arithmetic below.
for dep in curl bc; do
    if ! command -v "$dep" &> /dev/null; then
        echo -e "${RED}โ ้่ฏฏ: ้่ฆๅฎ่ฃ ${dep}${NC}"
        exit 1
    fi
done

# Scratch directory, removed on any exit.
# FIX: single-quote the trap so expansion happens at trap time and the
# path is quoted against whitespace.
TMP_DIR=$(mktemp -d)
trap 'rm -rf "$TMP_DIR"' EXIT

# Fire one request and append "id,http_code,seconds" to the results CSV.
test_request() {
    local request_id=$1

    # NOTE: curl's %{http_code} / %{time_total} are appended on their own
    # lines after the body; the original's unused $headers/$start_time
    # locals have been removed.
    local response
    response=$(curl -s -w "\n%{http_code}\n%{time_total}" \
        -X POST "${BASE_URL}/v1/chat/completions" \
        -H "Content-Type: application/json" \
        ${API_KEY:+-H "Authorization: Bearer $API_KEY"} \
        -d "{
            \"model\": \"grok-3-fast\",
            \"messages\": [{\"role\": \"user\", \"content\": \"ๆต่ฏ่ฏทๆฑ #${request_id}๏ผ่ฏท็ฎ็ญๅๅคOK\"}],
            \"stream\": false,
            \"max_tokens\": 10
        }" 2>&1)

    local http_code=$(echo "$response" | tail -n 2 | head -n 1)
    local time_total=$(echo "$response" | tail -n 1)

    # Record the result (short single-line appends; O_APPEND keeps them atomic).
    echo "${request_id},${http_code},${time_total}" >> "$TMP_DIR/results.csv"

    # Progress indicator.
    echo -ne "\r  ่ฟๅบฆ: ${request_id}/${TOTAL_REQUESTS}"
}

# Export for GNU parallel, which runs test_request in child shells.
export -f test_request
# FIX: TOTAL_REQUESTS is read inside test_request and must be exported too
# (the original omitted it, so the progress line was blank under parallel).
export BASE_URL API_KEY TMP_DIR TOTAL_REQUESTS

# CSV header row (the awk filters below match numeric status only, so it is inert).
echo "id,status,time" > "$TMP_DIR/results.csv"

echo -e "${YELLOW}๐ ๅผๅงๅนถๅๆต่ฏ...${NC}"
START_TIME=$(date +%s.%N)

# Prefer GNU parallel; fall back to batched background jobs.
if command -v parallel &> /dev/null; then
    seq 1 $TOTAL_REQUESTS | parallel -j $CONCURRENCY test_request {}
else
    for i in $(seq 1 $TOTAL_REQUESTS); do
        test_request $i &

        # Wait after each batch of $CONCURRENCY background jobs.
        if (( i % CONCURRENCY == 0 )); then
            wait
        fi
    done
    wait
fi

END_TIME=$(date +%s.%N)
TOTAL_TIME=$(echo "$END_TIME - $START_TIME" | bc)

echo -e "\n"

# ----- Result statistics -----
echo -e "${BLUE}โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ${NC}"
echo -e "${BLUE}๐ ๆต่ฏ็ปๆ็ป่ฎก${NC}"
echo -e "${BLUE}โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ${NC}"

# Success = HTTP 200; everything else (including curl failures) counts as error.
SUCCESS_COUNT=$(awk -F',' '$2 == 200 {count++} END {print count+0}' "$TMP_DIR/results.csv")
ERROR_COUNT=$((TOTAL_REQUESTS - SUCCESS_COUNT))

echo -e "  ๆต่ฏๆถ้ด: ${TOTAL_TIME}s"
echo -e "  ๆป่ฏทๆฑๆฐ: ${TOTAL_REQUESTS}"
echo -e "  ๅนถๅๆฐ: ${CONCURRENCY}"
echo ""
echo -e "  ๆๅ่ฏทๆฑ: ${GREEN}${SUCCESS_COUNT}${NC} ($(echo "scale=1; $SUCCESS_COUNT * 100 / $TOTAL_REQUESTS" | bc)%)"
echo -e "  ๅคฑ่ดฅ่ฏทๆฑ: ${RED}${ERROR_COUNT}${NC} ($(echo "scale=1; $ERROR_COUNT * 100 / $TOTAL_REQUESTS" | bc)%)"
echo ""

THROUGHPUT=$(echo "scale=2; $TOTAL_REQUESTS / $TOTAL_TIME" | bc)
echo -e "  ๅๅ้: ${GREEN}${THROUGHPUT}${NC} req/s"
echo ""

# Latency statistics over successful requests only.
if [ $SUCCESS_COUNT -gt 0 ]; then
    echo -e "  ๅปถ่ฟ็ป่ฎก:"

    awk -F',' '$2 == 200 {print $3}' "$TMP_DIR/results.csv" | sort -n > "$TMP_DIR/latencies.txt"

    MIN=$(head -n 1 "$TMP_DIR/latencies.txt" | awk '{printf "%.0f", $1*1000}')
    MAX=$(tail -n 1 "$TMP_DIR/latencies.txt" | awk '{printf "%.0f", $1*1000}')
    AVG=$(awk '{sum+=$1; count++} END {printf "%.0f", sum/count*1000}' "$TMP_DIR/latencies.txt")

    # FIX: sed line addresses start at 1 — clamp every percentile index so a
    # run with very few successes (index computing to 0) cannot break sed.
    P50_LINE=$((SUCCESS_COUNT / 2))
    if (( P50_LINE < 1 )); then P50_LINE=1; fi
    P50=$(sed -n "${P50_LINE}p" "$TMP_DIR/latencies.txt" | awk '{printf "%.0f", $1*1000}')

    P95_LINE=$(echo "scale=0; $SUCCESS_COUNT * 0.95 / 1" | bc)
    if (( P95_LINE < 1 )); then P95_LINE=1; fi
    P95=$(sed -n "${P95_LINE}p" "$TMP_DIR/latencies.txt" | awk '{printf "%.0f", $1*1000}')

    P99_LINE=$(echo "scale=0; $SUCCESS_COUNT * 0.99 / 1" | bc)
    if (( P99_LINE < 1 )); then P99_LINE=1; fi
    P99=$(sed -n "${P99_LINE}p" "$TMP_DIR/latencies.txt" | awk '{printf "%.0f", $1*1000}')

    echo -e "    ๆๅฐ: ${MIN}ms"
    echo -e "    ๅนณๅ: ${AVG}ms"
    echo -e "    ๆๅคง: ${MAX}ms"
    echo -e "    P50: ${P50}ms"
    echo -e "    P95: ${P95}ms"
    echo -e "    P99: ${P99}ms"
fi

echo -e "${BLUE}โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ${NC}"

# ----- Performance rating (throughput thresholds in req/s) -----
echo -e "${YELLOW}๐ฏ ๆง่ฝ่ฏ็บง:${NC}"

if (( $(echo "$THROUGHPUT >= 100" | bc -l) )); then
    RATING="โญโญโญโญโญ ไผ็ง"
elif (( $(echo "$THROUGHPUT >= 60" | bc -l) )); then
    RATING="โญโญโญโญ ่ฏๅฅฝ"
elif (( $(echo "$THROUGHPUT >= 30" | bc -l) )); then
    RATING="โญโญโญ ไธญ็ญ"
elif (( $(echo "$THROUGHPUT >= 10" | bc -l) )); then
    RATING="โญโญ ่พไฝ"
else
    RATING="โญ ้ไผๅ"
fi

echo -e "  ๅๅ้ (${THROUGHPUT} req/s): ${RATING}"

echo ""
echo -e "${GREEN}โ ๆต่ฏๅฎๆ๏ผ${NC}"
|
test_key.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import requests
|
| 2 |
+
import json
|
| 3 |
+
import uuid
|
| 4 |
+
|
| 5 |
+
# ================= Configuration =================
# 1. Create a new Key on the admin console [Key Management] page.
# 2. Paste the newly generated sk-... value below.
API_KEY = "YOUR_NEW_API_KEY"
BASE_URL = "http://127.0.0.1:8000"
# ==========================================
|
| 11 |
+
|
| 12 |
+
def test_chat_completion():
    """Smoke-test the configured API key against /v1/chat/completions.

    Sends one non-streaming request and prints the status, the assistant's
    reply on success, or the error text / exception otherwise.
    """
    print(f"ๅผๅงๆต่ฏ Key: {API_KEY[:10]}...")

    url = f"{BASE_URL}/v1/chat/completions"
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json",
    }
    payload = {
        "model": "grok-4-fast",
        "messages": [{"role": "user", "content": "Hello, who are you? Tell me a joke."}],
        "stream": False,
    }

    try:
        response = requests.post(url, headers=headers, json=payload, timeout=30)
        print(f"็ถๆ็ : {response.status_code}")

        if response.status_code == 200:
            reply = response.json()['choices'][0]['message']['content']
            print("--- ๅๅบๆๅ ---")
            print(reply)
            print("---------------")
            print("ๆต่ฏ้่ฟ๏ผ็ฐๅจๅป็ฎก็ๅๅฐ [ๆฅๅฟๅฎก่ฎก] ็กฎ่ฎคๆฅๅฟไธญๆฏๅฆ่ฎฐๅฝไบ่ฏฅ่ฏทๆฑใ")
        else:
            print(f"่ฏทๆฑๅคฑ่ดฅ: {response.text}")

    except Exception as e:
        # Covers connection errors, timeouts and unexpected response shapes.
        print(f"ๅ็้่ฏฏ: {e}")
|
| 45 |
+
|
| 46 |
+
if __name__ == "__main__":
    # Refuse to run until the placeholder key has been replaced.
    if API_KEY == "YOUR_NEW_API_KEY":
        print("่ฏทๅๅฐไปฃ็ ไธญ็ API_KEY ๆฟๆขไธบไฝ ๅๆ็ๆ็ Key๏ผ")
    else:
        test_chat_completion()
|
uv.lock
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|