JXJBing commited on
Commit
1a9e2c2
ยท
verified ยท
1 Parent(s): 07e4442

Upload 45 files

Browse files
.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ app/template/favicon.png filter=lfs diff=lfs merge=lfs -text
37
+ data/temp/video/users-8522ce45-679b-4e0e-a0f7-bb18f434eb6b-generated-15f7113f-5d16-4ff1-bdaa-a2eabd66671c-generated_video.mp4 filter=lfs diff=lfs merge=lfs -text
Dockerfile ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# --- Build stage: install dependencies into an isolated prefix ---
FROM python:3.11-slim AS builder

WORKDIR /build

# Install wheels only, then strip test dirs, bytecode and metadata to shrink the layer.
COPY requirements.txt .
RUN pip install --no-cache-dir --only-binary=:all: --prefix=/install -r requirements.txt && \
    find /install -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true && \
    find /install -type d -name "tests" -exec rm -rf {} + 2>/dev/null || true && \
    find /install -type d -name "test" -exec rm -rf {} + 2>/dev/null || true && \
    find /install -type d -name "*.dist-info" -exec sh -c 'rm -f "$1"/RECORD "$1"/INSTALLER' _ {} \; && \
    find /install -type f -name "*.pyc" -delete && \
    find /install -type f -name "*.pyo" -delete && \
    find /install -name "*.so" -exec strip --strip-unneeded {} \; 2>/dev/null || true

# --- Runtime stage: minimal image ---
FROM python:3.11-slim

WORKDIR /app

# Drop docs/man/locale and apt caches from the base image.
RUN rm -rf /usr/share/doc/* \
    /usr/share/man/* \
    /usr/share/locale/* \
    /var/cache/apt/* \
    /var/lib/apt/lists/* \
    /tmp/* \
    /var/tmp/*

# Copy the pre-installed packages from the build stage.
COPY --from=builder /install /usr/local

# Create required directories (including the mountable data dirs).
RUN mkdir -p /app/logs /app/data/temp/image /app/data/temp/video

# Copy application code.
COPY app/ ./app/
COPY main.py .

# Install the entrypoint script.
COPY docker-entrypoint.sh /usr/local/bin/
RUN chmod +x /usr/local/bin/docker-entrypoint.sh

# No .pyc files at runtime; unbuffered stdout for container logs.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

EXPOSE 8000

# Entrypoint performs config initialization before handing off.
ENTRYPOINT ["docker-entrypoint.sh"]

# Default command: run the ASGI app.
CMD ["python", "-m", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Chenyme
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
app/.DS_Store ADDED
Binary file (6.15 kB). View file
 
app/api/admin/manage.py ADDED
@@ -0,0 +1,1017 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """็ฎก็†ๆŽฅๅฃ - Token็ฎก็†ๅ’Œ็ณป็ปŸ้…็ฝฎ"""
2
+
3
+ import secrets
4
+ import time
5
+ from typing import Dict, Any, List, Optional
6
+ from datetime import datetime, timedelta
7
+ from pathlib import Path
8
+ from fastapi import APIRouter, HTTPException, Depends, Header, Query
9
+ from fastapi.responses import HTMLResponse
10
+ from pydantic import BaseModel
11
+
12
+ from app.core.config import setting
13
+ from app.core.logger import logger
14
+ from app.services.grok.token import token_manager
15
+ from app.services.request_stats import request_stats
16
+ from app.models.grok_models import TokenType
17
+
18
+
19
router = APIRouter(tags=["็ฎก็†"])

# Filesystem layout: HTML templates sit beside the app package; media caches
# live under data/temp relative to the project root.
STATIC_DIR = Path(__file__).parents[2] / "template"
TEMP_DIR = Path(__file__).parents[3] / "data" / "temp"
IMAGE_CACHE_DIR = TEMP_DIR / "image"
VIDEO_CACHE_DIR = TEMP_DIR / "video"
SESSION_EXPIRE_HOURS = 24
BYTES_PER_KB = 1024
BYTES_PER_MB = 1024 * 1024

# In-memory session store: session token -> expiry time (lost on restart).
_sessions: Dict[str, datetime] = {}
32
+
33
+
34
+ # === ่ฏทๆฑ‚/ๅ“ๅบ”ๆจกๅž‹ ===
35
+
36
class LoginRequest(BaseModel):
    """Admin login credentials."""
    username: str
    password: str
39
+
40
+
41
class LoginResponse(BaseModel):
    """Login result; carries the session token on success."""
    success: bool
    token: Optional[str] = None
    message: str
45
+
46
+
47
class AddTokensRequest(BaseModel):
    """Batch token registration payload (token_type: "sso" or "ssoSuper")."""
    tokens: List[str]
    token_type: str
50
+
51
+
52
class DeleteTokensRequest(BaseModel):
    """Batch token deletion payload (token_type: "sso" or "ssoSuper")."""
    tokens: List[str]
    token_type: str
55
+
56
+
57
class TokenInfo(BaseModel):
    """Projection of one stored token record as shown in the admin UI."""
    token: str
    token_type: str
    created_time: Optional[int] = None
    remaining_queries: int
    heavy_remaining_queries: int
    status: str
    tags: List[str] = []
    note: str = ""
    cooldown_until: Optional[int] = None
    cooldown_remaining: int = 0
    last_failure_time: Optional[int] = None
    last_failure_reason: str = ""
    limit_reason: str = ""
71
+
72
+
73
class TokenListResponse(BaseModel):
    """Envelope for the token listing endpoint."""
    success: bool
    data: List[TokenInfo]
    total: int
77
+
78
+
79
class UpdateSettingsRequest(BaseModel):
    """Partial settings update; either section may be omitted."""
    global_config: Optional[Dict[str, Any]] = None
    grok_config: Optional[Dict[str, Any]] = None
82
+
83
+
84
class UpdateTokenTagsRequest(BaseModel):
    """Replace the tag list of a single token."""
    token: str
    token_type: str
    tags: List[str]
88
+
89
+
90
class UpdateTokenNoteRequest(BaseModel):
    """Replace the free-form note of a single token."""
    token: str
    token_type: str
    note: str
94
+
95
+
96
class TestTokenRequest(BaseModel):
    """Token validity-check payload."""
    token: str
    token_type: str
99
+
100
+
101
+ # === ่พ…ๅŠฉๅ‡ฝๆ•ฐ ===
102
+
103
def validate_token_type(token_type_str: str) -> TokenType:
    """Map a token-type string ("sso"/"ssoSuper") to a TokenType, or raise 400."""
    mapping = {"sso": TokenType.NORMAL, "ssoSuper": TokenType.SUPER}
    try:
        return mapping[token_type_str]
    except KeyError:
        raise HTTPException(
            status_code=400,
            detail={"error": "ๆ— ๆ•ˆ็š„Token็ฑปๅž‹", "code": "INVALID_TYPE"}
        ) from None
111
+
112
+
113
def parse_created_time(created_time) -> Optional[int]:
    """Normalize a stored creation timestamp to an int.

    Strings are parsed (empty string -> None), ints pass through,
    and any other type yields None.
    """
    if isinstance(created_time, str):
        return int(created_time) if created_time else None
    if isinstance(created_time, int):
        return created_time
    return None
120
+
121
+
122
+ def _get_cooldown_remaining_ms(token_data: Dict[str, Any], now_ms: Optional[int] = None) -> int:
123
+ """่Žทๅ–ๅ†ทๅดๅ‰ฉไฝ™ๆ—ถ้—ด๏ผˆๆฏซ็ง’๏ผ‰."""
124
+ cooldown_until = token_data.get("cooldownUntil")
125
+ if not cooldown_until:
126
+ return 0
127
+
128
+ try:
129
+ now = now_ms if now_ms is not None else int(time.time() * 1000)
130
+ remaining = int(cooldown_until) - now
131
+ return remaining if remaining > 0 else 0
132
+ except (TypeError, ValueError):
133
+ return 0
134
+
135
+
136
def _is_token_in_cooldown(token_data: Dict[str, Any], now_ms: Optional[int] = None) -> bool:
    """True while the token still has 429-cooldown time remaining."""
    remaining = _get_cooldown_remaining_ms(token_data, now_ms)
    return remaining > 0
139
+
140
+
141
def calculate_token_stats(tokens: Dict[str, Any], token_type: str) -> Dict[str, int]:
    """Aggregate pool-level counters for a token map.

    Precedence per token: expired > cooldown > unused/exhausted/active.
    For "normal" pools only the standard quota is considered; other pools
    (super) also factor in the heavy quota.
    """
    now_ms = int(time.time() * 1000)
    expired = cooldown = exhausted = unused = active = 0

    for record in tokens.values():
        if record.get("status") == "expired":
            expired += 1
            continue
        if _is_token_in_cooldown(record, now_ms):
            cooldown += 1
            continue

        remaining = record.get("remainingQueries", -1)
        heavy = record.get("heavyremainingQueries", -1)

        # -1 means "never queried"; 0 means the quota is spent.
        if token_type == "normal":
            fresh = remaining == -1
            drained = remaining == 0
        else:
            fresh = remaining == -1 and heavy == -1
            drained = remaining == 0 or heavy == 0

        if fresh:
            unused += 1
        elif drained:
            exhausted += 1
        else:
            active += 1

    return {
        "total": len(tokens),
        "unused": unused,
        "limited": cooldown + exhausted,
        "cooldown": cooldown,
        "exhausted": exhausted,
        "expired": expired,
        "active": active
    }
187
+
188
+
189
def verify_admin_session(authorization: Optional[str] = Header(None)) -> bool:
    """FastAPI dependency: require a valid, unexpired Bearer session.

    Raises 401 for a missing/malformed header, an unknown token, or an
    expired session (expired sessions are evicted on access).
    """
    prefix = "Bearer "
    if not authorization or not authorization.startswith(prefix):
        raise HTTPException(status_code=401, detail={"error": "ๆœชๆŽˆๆƒ่ฎฟ้—ฎ", "code": "UNAUTHORIZED"})

    session_token = authorization[len(prefix):]
    expires_at = _sessions.get(session_token)

    if expires_at is None:
        raise HTTPException(status_code=401, detail={"error": "ไผš่ฏๆ— ๆ•ˆ", "code": "SESSION_INVALID"})

    if datetime.now() > expires_at:
        _sessions.pop(session_token, None)
        raise HTTPException(status_code=401, detail={"error": "ไผš่ฏๅทฒ่ฟ‡ๆœŸ", "code": "SESSION_EXPIRED"})

    return True
204
+
205
+
206
def get_token_status(token_data: Dict[str, Any], token_type: str) -> str:
    """Human-readable status label for one token record.

    Precedence: expired > cooling down > unused > exhausted > normal.
    """
    if token_data.get("status") == "expired":
        return "ๅคฑๆ•ˆ"
    if _is_token_in_cooldown(token_data):
        return "ๅ†ทๅดไธญ"

    remaining = token_data.get("remainingQueries", -1)
    heavy = token_data.get("heavyremainingQueries", -1)

    if token_type == "ssoSuper":
        fresh = remaining == -1 and heavy == -1
        drained = remaining == 0 or heavy == 0
    else:
        fresh = remaining == -1
        drained = remaining == 0

    if fresh:
        return "ๆœชไฝฟ็”จ"
    if drained:
        return "้ขๅบฆ่€—ๅฐฝ"
    return "ๆญฃๅธธ"
229
+
230
+
231
+ def _calculate_dir_size(directory: Path) -> int:
232
+ """่ฎก็ฎ—็›ฎๅฝ•ๅคงๅฐ"""
233
+ total = 0
234
+ for file_path in directory.iterdir():
235
+ if file_path.is_file():
236
+ try:
237
+ total += file_path.stat().st_size
238
+ except Exception as e:
239
+ logger.warning(f"[Admin] ๆ— ๆณ•่Žทๅ–ๆ–‡ไปถๅคงๅฐ: {file_path.name}, {e}")
240
+ return total
241
+
242
+
243
def _format_size(size_bytes: int) -> str:
    """Render a byte count as "N.N KB" below one MB, otherwise "N.N MB"."""
    if size_bytes < BYTES_PER_MB:
        return f"{size_bytes / BYTES_PER_KB:.1f} KB"
    return f"{size_bytes / BYTES_PER_MB:.1f} MB"
249
+
250
+
251
+ # === ้กต้ข่ทฏ็”ฑ ===
252
+
253
@router.get("/login", response_class=HTMLResponse)
async def login_page():
    """Serve the admin login page from the template directory, 404 if missing."""
    page = STATIC_DIR / "login.html"
    if not page.exists():
        raise HTTPException(status_code=404, detail="็™ปๅฝ•้กต้ขไธๅญ˜ๅœจ")
    return page.read_text(encoding="utf-8")
260
+
261
+
262
@router.get("/manage", response_class=HTMLResponse)
async def manage_page():
    """Serve the admin management page from the template directory, 404 if missing."""
    page = STATIC_DIR / "admin.html"
    if not page.exists():
        raise HTTPException(status_code=404, detail="็ฎก็†้กต้ขไธๅญ˜ๅœจ")
    return page.read_text(encoding="utf-8")
269
+
270
+
271
+ # === API็ซฏ็‚น ===
272
+
273
@router.post("/api/login", response_model=LoginResponse)
async def admin_login(request: LoginRequest) -> LoginResponse:
    """Authenticate an administrator and mint a session token valid for 24h.

    Credentials are checked with ``secrets.compare_digest`` (on UTF-8 bytes,
    so non-ASCII credentials are supported) instead of ``!=`` so response
    timing does not leak how much of the username/password matched.
    """
    try:
        logger.debug(f"[Admin] ็™ปๅฝ•ๅฐ่ฏ•: {request.username}")

        expected_user = setting.global_config.get("admin_username", "")
        expected_pass = setting.global_config.get("admin_password", "")

        # Evaluate both comparisons unconditionally so timing does not reveal
        # which field mismatched. NOTE(review): empty configured credentials
        # still match empty-string logins — confirm that is intended.
        user_ok = secrets.compare_digest(request.username.encode("utf-8"), expected_user.encode("utf-8"))
        pass_ok = secrets.compare_digest(request.password.encode("utf-8"), expected_pass.encode("utf-8"))
        if not (user_ok and pass_ok):
            logger.warning(f"[Admin] ็™ปๅฝ•ๅคฑ่ดฅ: {request.username}")
            return LoginResponse(success=False, message="็”จๆˆทๅๆˆ–ๅฏ†็ ้”™่ฏฏ")

        # Unguessable session token; expiry enforced by verify_admin_session.
        session_token = secrets.token_urlsafe(32)
        _sessions[session_token] = datetime.now() + timedelta(hours=SESSION_EXPIRE_HOURS)

        logger.debug(f"[Admin] ็™ปๅฝ•ๆˆๅŠŸ: {request.username}")
        return LoginResponse(success=True, token=session_token, message="็™ปๅฝ•ๆˆๅŠŸ")

    except Exception as e:
        logger.error(f"[Admin] ็™ปๅฝ•ๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"็™ปๅฝ•ๅคฑ่ดฅ: {e}", "code": "LOGIN_ERROR"})
295
+
296
+
297
@router.post("/api/logout")
async def admin_logout(_: bool = Depends(verify_admin_session), authorization: Optional[str] = Header(None)) -> Dict[str, Any]:
    """Invalidate the caller's admin session token."""
    try:
        prefix = "Bearer "
        if authorization and authorization.startswith(prefix):
            # Drop the session if present; logout succeeds either way.
            _sessions.pop(authorization[len(prefix):], None)
            logger.debug("[Admin] ็™ปๅ‡บๆˆๅŠŸ")
            return {"success": True, "message": "็™ปๅ‡บๆˆๅŠŸ"}

        logger.warning("[Admin] ็™ปๅ‡บๅคฑ่ดฅ: ๆ— ๆ•ˆไผš่ฏ")
        return {"success": False, "message": "ๆ— ๆ•ˆ็š„ไผš่ฏ"}

    except Exception as e:
        logger.error(f"[Admin] ็™ปๅ‡บๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"็™ปๅ‡บๅคฑ่ดฅ: {e}", "code": "LOGOUT_ERROR"})
314
+
315
+
316
@router.get("/api/tokens", response_model=TokenListResponse)
async def list_tokens(_: bool = Depends(verify_admin_session)) -> TokenListResponse:
    """List every stored token (normal + super) with quota/cooldown metadata.

    The per-token projection was duplicated verbatim for the two pools;
    it is factored into a local builder so the exhaustion rule is stated once.
    """
    def build_info(token: str, data: Dict[str, Any], type_label: str, now_ms: int) -> TokenInfo:
        # Cooldown fields are only surfaced while the cooldown is active.
        cooldown_remaining_ms = _get_cooldown_remaining_ms(data, now_ms)
        cooldown_until = data.get("cooldownUntil") if cooldown_remaining_ms else None
        limit_reason = "cooldown" if cooldown_remaining_ms else ""
        remaining = data.get("remainingQueries", -1)
        heavy_remaining = data.get("heavyremainingQueries", -1)
        if not limit_reason:
            # Super tokens are exhausted when either quota hits 0;
            # normal tokens only track the standard quota.
            if remaining == 0 or (type_label == "ssoSuper" and heavy_remaining == 0):
                limit_reason = "exhausted"
        return TokenInfo(
            token=token,
            token_type=type_label,
            created_time=parse_created_time(data.get("createdTime")),
            remaining_queries=remaining,
            heavy_remaining_queries=heavy_remaining,
            status=get_token_status(data, type_label),
            tags=data.get("tags", []),
            note=data.get("note", ""),
            cooldown_until=cooldown_until,
            # Ceil to whole seconds so a sub-second remainder still shows as 1s.
            cooldown_remaining=(cooldown_remaining_ms + 999) // 1000 if cooldown_remaining_ms else 0,
            last_failure_time=data.get("lastFailureTime") or None,
            last_failure_reason=data.get("lastFailureReason") or "",
            limit_reason=limit_reason
        )

    try:
        logger.debug("[Admin] ่Žทๅ–Tokenๅˆ—่กจ")

        all_tokens = token_manager.get_tokens()
        now_ms = int(time.time() * 1000)
        token_list: List[TokenInfo] = []

        for pool_type, type_label in ((TokenType.NORMAL, "sso"), (TokenType.SUPER, "ssoSuper")):
            for token, data in all_tokens.get(pool_type.value, {}).items():
                token_list.append(build_info(token, data, type_label, now_ms))

        logger.debug(f"[Admin] Tokenๅˆ—่กจ่Žทๅ–ๆˆๅŠŸ: {len(token_list)}ไธช")
        return TokenListResponse(success=True, data=token_list, total=len(token_list))

    except Exception as e:
        logger.error(f"[Admin] ่Žทๅ–Tokenๅˆ—่กจๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"่Žทๅ–ๅคฑ่ดฅ: {e}", "code": "LIST_ERROR"})
378
+
379
+
380
@router.post("/api/tokens/add")
async def add_tokens(request: AddTokensRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Batch-register tokens of the requested type."""
    try:
        count = len(request.tokens)
        logger.debug(f"[Admin] ๆทปๅŠ Token: {request.token_type}, {count}ไธช")

        await token_manager.add_token(request.tokens, validate_token_type(request.token_type))

        logger.debug(f"[Admin] TokenๆทปๅŠ ๆˆๅŠŸ: {count}ไธช")
        return {"success": True, "message": f"ๆˆๅŠŸๆทปๅŠ  {count} ไธชToken", "count": count}

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"[Admin] TokenๆทปๅŠ ๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆทปๅŠ ๅคฑ่ดฅ: {e}", "code": "ADD_ERROR"})
397
+
398
+
399
@router.post("/api/tokens/delete")
async def delete_tokens(request: DeleteTokensRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Batch-remove tokens of the requested type."""
    try:
        count = len(request.tokens)
        logger.debug(f"[Admin] ๅˆ ้™คToken: {request.token_type}, {count}ไธช")

        await token_manager.delete_token(request.tokens, validate_token_type(request.token_type))

        logger.debug(f"[Admin] Tokenๅˆ ้™คๆˆๅŠŸ: {count}ไธช")
        return {"success": True, "message": f"ๆˆๅŠŸๅˆ ้™ค {count} ไธชToken", "count": count}

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"[Admin] Tokenๅˆ ้™คๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๅˆ ้™คๅคฑ่ดฅ: {e}", "code": "DELETE_ERROR"})
416
+
417
+
418
@router.get("/api/settings")
async def get_settings(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Return the current global and grok configuration sections."""
    try:
        logger.debug("[Admin] ่Žทๅ–้…็ฝฎ")
        payload = {"global": setting.global_config, "grok": setting.grok_config}
        return {"success": True, "data": payload}
    except Exception as e:
        logger.error(f"[Admin] ่Žทๅ–้…็ฝฎๅคฑ่ดฅ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"่Žทๅ–ๅคฑ่ดฅ: {e}", "code": "GET_SETTINGS_ERROR"})
427
+
428
+
429
@router.post("/api/settings")
async def update_settings(request: UpdateSettingsRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Persist the provided configuration sections (either may be None)."""
    try:
        logger.debug("[Admin] ๆ›ดๆ–ฐ้…็ฝฎ")
        await setting.save(global_config=request.global_config, grok_config=request.grok_config)
        logger.debug("[Admin] ้…็ฝฎๆ›ดๆ–ฐๆˆๅŠŸ")
        return {"success": True, "message": "้…็ฝฎๆ›ดๆ–ฐๆˆๅŠŸ"}
    except Exception as e:
        logger.error(f"[Admin] ๆ›ดๆ–ฐ้…็ฝฎๅคฑ่ดฅ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆ›ดๆ–ฐๅคฑ่ดฅ: {e}", "code": "UPDATE_SETTINGS_ERROR"})
440
+
441
+
442
@router.get("/api/cache/size")
async def get_cache_size(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Report the on-disk size of the image and video caches (formatted + raw bytes)."""
    try:
        logger.debug("[Admin] ่Žทๅ–็ผ“ๅญ˜ๅคงๅฐ")

        image_size = _calculate_dir_size(IMAGE_CACHE_DIR) if IMAGE_CACHE_DIR.exists() else 0
        video_size = _calculate_dir_size(VIDEO_CACHE_DIR) if VIDEO_CACHE_DIR.exists() else 0

        logger.debug(f"[Admin] ็ผ“ๅญ˜ๅคงๅฐ: ๅ›พ็‰‡{_format_size(image_size)}, ่ง†้ข‘{_format_size(video_size)}")

        data = {
            "image_size": _format_size(image_size),
            "video_size": _format_size(video_size),
            "total_size": _format_size(image_size + video_size),
            "image_size_bytes": image_size,
            "video_size_bytes": video_size,
            "total_size_bytes": image_size + video_size
        }
        return {"success": True, "data": data}

    except Exception as e:
        logger.error(f"[Admin] ่Žทๅ–็ผ“ๅญ˜ๅคงๅฐๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"่Žทๅ–ๅคฑ่ดฅ: {e}", "code": "CACHE_SIZE_ERROR"})
469
+
470
+
471
@router.get("/api/cache/list")
async def list_cache_files(
    cache_type: str = Query("image", alias="type"),
    limit: int = 50,
    offset: int = 0,
    _: bool = Depends(verify_admin_session)
) -> Dict[str, Any]:
    """List cached files for admin preview, newest first, with paging."""
    try:
        cache_type = cache_type.lower()
        if cache_type not in ("image", "video"):
            raise HTTPException(status_code=400, detail={"error": "Invalid cache type", "code": "INVALID_CACHE_TYPE"})

        # Clamp paging parameters to sane bounds.
        limit = min(max(limit, 1), 200)
        offset = max(offset, 0)

        cache_dir = IMAGE_CACHE_DIR if cache_type == "image" else VIDEO_CACHE_DIR
        if not cache_dir.exists():
            return {"success": True, "data": {"total": 0, "items": [], "offset": offset, "limit": limit, "has_more": False}}

        entries = []
        for file_path in cache_dir.iterdir():
            if not file_path.is_file():
                continue
            try:
                stat = file_path.stat()
            except Exception as e:
                logger.warning(f"[Admin] Skip cache file: {file_path.name}, {e}")
                continue
            entries.append((file_path, stat.st_mtime, stat.st_size))

        # Newest files first.
        entries.sort(key=lambda entry: entry[1], reverse=True)
        total = len(entries)
        page = entries[offset:offset + limit]

        items = [
            {
                "name": file_path.name,
                "size": _format_size(size),
                "size_bytes": size,
                "mtime": int(mtime * 1000),
                # NOTE(review): video entries also get an /images/ URL here —
                # confirm the /images route serves the video cache too.
                "url": f"/images/{file_path.name}",
                "type": cache_type
            }
            for file_path, mtime, size in page
        ]

        return {
            "success": True,
            "data": {
                "total": total,
                "items": items,
                "offset": offset,
                "limit": limit,
                "has_more": offset + limit < total
            }
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"[Admin] ่Žทๅ–็ผ“ๅญ˜ๅˆ—่กจๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"่Žทๅ–ๅคฑ่ดฅ: {e}", "code": "CACHE_LIST_ERROR"})
538
+
539
+
540
@router.post("/api/cache/clear")
async def clear_cache(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Delete every file in both the image and video cache directories.

    Deletion is best-effort: a file that cannot be removed is logged and
    skipped so one bad entry does not abort the sweep. The identical
    per-directory loop was duplicated; it is factored into a local helper.
    """
    def purge(cache_dir: Path) -> int:
        # Returns the number of files actually removed from *cache_dir*.
        removed = 0
        if not cache_dir.exists():
            return removed
        for file_path in cache_dir.iterdir():
            if not file_path.is_file():
                continue
            try:
                file_path.unlink()
                removed += 1
            except Exception as e:
                logger.error(f"[Admin] ๅˆ ้™คๅคฑ่ดฅ: {file_path.name}, {e}")
        return removed

    try:
        logger.debug("[Admin] ๆธ…็†็ผ“ๅญ˜")

        image_count = purge(IMAGE_CACHE_DIR)
        video_count = purge(VIDEO_CACHE_DIR)
        total = image_count + video_count

        logger.debug(f"[Admin] ็ผ“ๅญ˜ๆธ…็†ๅฎŒๆˆ: ๅ›พ็‰‡{image_count}, ่ง†้ข‘{video_count}")

        return {
            "success": True,
            "message": f"ๆˆๅŠŸๆธ…็†็ผ“ๅญ˜๏ผŒๅˆ ้™คๅ›พ็‰‡ {image_count} ไธช๏ผŒ่ง†้ข‘ {video_count} ไธช๏ผŒๅ…ฑ {total} ไธชๆ–‡ไปถ",
            "data": {"deleted_count": total, "image_count": image_count, "video_count": video_count}
        }

    except Exception as e:
        logger.error(f"[Admin] ๆธ…็†็ผ“ๅญ˜ๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆธ…็†ๅคฑ่ดฅ: {e}", "code": "CACHE_CLEAR_ERROR"})
581
+
582
+
583
@router.post("/api/cache/clear/images")
async def clear_image_cache(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Delete every file in the image cache directory (best-effort)."""
    try:
        logger.debug("[Admin] ๆธ…็†ๅ›พ็‰‡็ผ“ๅญ˜")

        deleted = 0
        entries = IMAGE_CACHE_DIR.iterdir() if IMAGE_CACHE_DIR.exists() else ()
        for file_path in entries:
            if not file_path.is_file():
                continue
            try:
                file_path.unlink()
                deleted += 1
            except Exception as e:
                logger.error(f"[Admin] ๅˆ ้™คๅคฑ่ดฅ: {file_path.name}, {e}")

        logger.debug(f"[Admin] ๅ›พ็‰‡็ผ“ๅญ˜ๆธ…็†ๅฎŒๆˆ: {deleted}ไธช")
        return {"success": True, "message": f"ๆˆๅŠŸๆธ…็†ๅ›พ็‰‡็ผ“ๅญ˜๏ผŒๅˆ ้™ค {deleted} ไธชๆ–‡ไปถ", "data": {"deleted_count": deleted, "type": "images"}}

    except Exception as e:
        logger.error(f"[Admin] ๆธ…็†ๅ›พ็‰‡็ผ“ๅญ˜ๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆธ…็†ๅคฑ่ดฅ: {e}", "code": "IMAGE_CACHE_CLEAR_ERROR"})
605
+
606
+
607
@router.post("/api/cache/clear/videos")
async def clear_video_cache(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Delete every file in the video cache directory (best-effort)."""
    try:
        logger.debug("[Admin] ๆธ…็†่ง†้ข‘็ผ“ๅญ˜")

        deleted = 0
        entries = VIDEO_CACHE_DIR.iterdir() if VIDEO_CACHE_DIR.exists() else ()
        for file_path in entries:
            if not file_path.is_file():
                continue
            try:
                file_path.unlink()
                deleted += 1
            except Exception as e:
                logger.error(f"[Admin] ๅˆ ้™คๅคฑ่ดฅ: {file_path.name}, {e}")

        logger.debug(f"[Admin] ่ง†้ข‘็ผ“ๅญ˜ๆธ…็†ๅฎŒๆˆ: {deleted}ไธช")
        return {"success": True, "message": f"ๆˆๅŠŸๆธ…็†่ง†้ข‘็ผ“ๅญ˜๏ผŒๅˆ ้™ค {deleted} ไธชๆ–‡ไปถ", "data": {"deleted_count": deleted, "type": "videos"}}

    except Exception as e:
        logger.error(f"[Admin] ๆธ…็†่ง†้ข‘็ผ“ๅญ˜ๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆธ…็†ๅคฑ่ดฅ: {e}", "code": "VIDEO_CACHE_CLEAR_ERROR"})
629
+
630
+
631
@router.get("/api/stats")
async def get_stats(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Aggregate token-pool statistics for the admin dashboard."""
    try:
        logger.debug("[Admin] ๅผ€ๅง‹่Žทๅ–็ปŸ่ฎกไฟกๆฏ")

        pools = token_manager.get_tokens()
        normal_stats = calculate_token_stats(pools.get(TokenType.NORMAL.value, {}), "normal")
        super_stats = calculate_token_stats(pools.get(TokenType.SUPER.value, {}), "super")
        total = normal_stats["total"] + super_stats["total"]

        logger.debug(f"[Admin] ็ปŸ่ฎกไฟกๆฏ่Žทๅ–ๆˆๅŠŸ - ๆ™ฎ้€šToken: {normal_stats['total']}, Super Token: {super_stats['total']}, ๆ€ป่ฎก: {total}")
        return {"success": True, "data": {"normal": normal_stats, "super": super_stats, "total": total}}

    except Exception as e:
        logger.error(f"[Admin] ่Žทๅ–็ปŸ่ฎกไฟกๆฏๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"่Žทๅ–ๅคฑ่ดฅ: {e}", "code": "STATS_ERROR"})
648
+
649
+
650
@router.get("/api/storage/mode")
async def get_storage_mode(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Expose the configured storage backend (STORAGE_MODE env var, default "file")."""
    try:
        import os

        logger.debug("[Admin] ่Žทๅ–ๅญ˜ๅ‚จๆจกๅผ")
        mode = os.environ.get("STORAGE_MODE", "file").upper()
        return {"success": True, "data": {"mode": mode}}
    except Exception as e:
        logger.error(f"[Admin] ่Žทๅ–ๅญ˜ๅ‚จๆจกๅผๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"่Žทๅ–ๅคฑ่ดฅ: {e}", "code": "STORAGE_MODE_ERROR"})
661
+
662
+
663
@router.post("/api/tokens/tags")
async def update_token_tags(request: UpdateTokenTagsRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Replace the tag list attached to a single token."""
    try:
        short = request.token[:10]
        logger.debug(f"[Admin] ๆ›ดๆ–ฐTokenๆ ‡็ญพ: {short}..., {request.tags}")

        await token_manager.update_token_tags(request.token, validate_token_type(request.token_type), request.tags)

        logger.debug(f"[Admin] Tokenๆ ‡็ญพๆ›ดๆ–ฐๆˆๅŠŸ: {short}...")
        return {"success": True, "message": "ๆ ‡็ญพๆ›ดๆ–ฐๆˆๅŠŸ", "tags": request.tags}

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"[Admin] Tokenๆ ‡็ญพๆ›ดๆ–ฐๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆ›ดๆ–ฐๅคฑ่ดฅ: {e}", "code": "UPDATE_TAGS_ERROR"})
680
+
681
+
682
@router.get("/api/tokens/tags/all")
async def get_all_tags(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Return the sorted union of tags across every stored token."""
    try:
        logger.debug("[Admin] ่Žทๅ–ๆ‰€ๆœ‰ๆ ‡็ญพ")

        unique_tags = set()
        for pool in token_manager.get_tokens().values():
            for record in pool.values():
                tags = record.get("tags", [])
                # Guard against malformed records where "tags" is not a list.
                if isinstance(tags, list):
                    unique_tags.update(tags)

        tags_list = sorted(unique_tags)
        logger.debug(f"[Admin] ๆ ‡็ญพ่Žทๅ–ๆˆๅŠŸ: {len(tags_list)}ไธช")
        return {"success": True, "data": tags_list}

    except Exception as e:
        logger.error(f"[Admin] ่Žทๅ–ๆ ‡็ญพๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"่Žทๅ–ๅคฑ่ดฅ: {e}", "code": "GET_TAGS_ERROR"})
704
+
705
+
706
@router.post("/api/tokens/note")
async def update_token_note(request: UpdateTokenNoteRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Replace the free-form note attached to a single token."""
    try:
        short = request.token[:10]
        logger.debug(f"[Admin] ๆ›ดๆ–ฐTokenๅค‡ๆณจ: {short}...")

        await token_manager.update_token_note(request.token, validate_token_type(request.token_type), request.note)

        logger.debug(f"[Admin] Tokenๅค‡ๆณจๆ›ดๆ–ฐๆˆๅŠŸ: {short}...")
        return {"success": True, "message": "ๅค‡ๆณจๆ›ดๆ–ฐๆˆๅŠŸ", "note": request.note}

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"[Admin] Tokenๅค‡ๆณจๆ›ดๆ–ฐๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆ›ดๆ–ฐๅคฑ่ดฅ: {e}", "code": "UPDATE_NOTE_ERROR"})
723
+
724
+
725
@router.post("/api/tokens/test")
async def test_token(request: TestTokenRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Probe a token's usability by querying its rate limits.

    When the probe fails, the stored token record is inspected to classify
    the failure (expired / cooling down / quota exhausted / blocked).
    """
    try:
        preview = request.token[:10]
        logger.debug(f"[Admin] ๆต‹่ฏ•Token: {preview}...")

        token_type = validate_token_type(request.token_type)
        cookie = f"sso-rw={request.token};sso={request.token}"

        limits = await token_manager.check_limits(cookie, "grok-4-fast")

        if limits:
            logger.debug(f"[Admin] Tokenๆต‹่ฏ•ๆˆๅŠŸ: {preview}...")
            return {
                "success": True,
                "message": "Tokenๆœ‰ๆ•ˆ",
                "data": {
                    "valid": True,
                    "remaining_queries": limits.get("remainingTokens", -1),
                    "limit": limits.get("limit", -1),
                },
            }

        logger.warning(f"[Admin] Tokenๆต‹่ฏ•ๅคฑ่ดฅ: {preview}...")

        record = token_manager.get_tokens().get(token_type.value, {}).get(request.token)
        if not record:
            return {"success": False, "message": "Tokenๆ•ฐๆฎๅผ‚ๅธธ", "data": {"valid": False, "error_type": "unknown", "error_code": "data_error"}}

        if record.get("status") == "expired":
            return {"success": False, "message": "Tokenๅทฒๅคฑๆ•ˆ", "data": {"valid": False, "error_type": "expired", "error_code": 401}}

        remaining_ms = _get_cooldown_remaining_ms(record)
        if remaining_ms:
            return {
                "success": False,
                "message": "Tokenๅค„ไบŽๅ†ทๅดไธญ",
                "data": {
                    "valid": False,
                    "error_type": "cooldown",
                    "error_code": 429,
                    # Round the remaining cooldown up to whole seconds.
                    "cooldown_remaining": (remaining_ms + 999) // 1000,
                },
            }

        # SUPER tokens are exhausted when either the normal or heavy quota is 0.
        out_of_quota = record.get("remainingQueries") == 0 or (
            token_type == TokenType.SUPER and record.get("heavyremainingQueries") == 0
        )
        if out_of_quota:
            return {
                "success": False,
                "message": "Token้ขๅบฆ่€—ๅฐฝ",
                "data": {"valid": False, "error_type": "exhausted", "error_code": "quota_exhausted"},
            }

        return {"success": False, "message": "ๆœๅŠกๅ™จ่ขซblockๆˆ–็ฝ‘็ปœ้”™่ฏฏ", "data": {"valid": False, "error_type": "blocked", "error_code": 403}}

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"[Admin] Tokenๆต‹่ฏ•ๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆต‹่ฏ•ๅคฑ่ดฅ: {e}", "code": "TEST_TOKEN_ERROR"})
788
+
789
+
790
# Strong references to fire-and-forget tasks: asyncio only keeps a weak
# reference to tasks, so a task whose handle is discarded can be garbage
# collected before it finishes (see asyncio.create_task documentation).
_refresh_tasks: set = set()


@router.post("/api/tokens/refresh-all")
async def refresh_all_tokens(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Start a background refresh of every token's remaining quota.

    Returns immediately; the frontend polls /api/tokens/refresh-progress
    for completion. A second request while a refresh is running is rejected.
    """
    import asyncio

    try:
        # Refuse to start a second concurrent refresh.
        progress = token_manager.get_refresh_progress()
        if progress.get("running"):
            return {
                "success": False,
                "message": "ๅˆทๆ–ฐไปปๅŠกๆญฃๅœจ่ฟ›่กŒไธญ",
                "data": progress
            }

        logger.info("[Admin] ๅฏๅŠจๅŽๅฐๅˆทๆ–ฐไปปๅŠก")
        task = asyncio.create_task(token_manager.refresh_all_limits())
        # Keep the task alive until it completes, then drop the reference.
        _refresh_tasks.add(task)
        task.add_done_callback(_refresh_tasks.discard)

        # Return right away; progress is reported via the polling endpoint.
        return {
            "success": True,
            "message": "ๅˆทๆ–ฐไปปๅŠกๅทฒๅฏๅŠจ",
            "data": {"started": True}
        }
    except Exception as e:
        logger.error(f"[Admin] ๅˆทๆ–ฐTokenๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๅˆทๆ–ฐๅคฑ่ดฅ: {e}", "code": "REFRESH_ALL_ERROR"})
818
+
819
+
820
@router.get("/api/tokens/refresh-progress")
async def get_refresh_progress(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Report the state of the background token-refresh task."""
    try:
        return {"success": True, "data": token_manager.get_refresh_progress()}
    except Exception as e:
        logger.error(f"[Admin] ่Žทๅ–ๅˆทๆ–ฐ่ฟ›ๅบฆๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"่Žทๅ–่ฟ›ๅบฆๅคฑ่ดฅ: {e}"})
829
+
830
+
831
@router.get("/api/request-stats")
async def get_request_stats(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Return aggregated request statistics (last 24h / 7 days)."""
    try:
        snapshot = request_stats.get_stats(hours=24, days=7)
        return {"success": True, "data": snapshot}
    except Exception as e:
        logger.error(f"[Admin] ่Žทๅ–่ฏทๆฑ‚็ปŸ่ฎกๅผ‚ๅธธ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"่Žทๅ–็ปŸ่ฎกๅคฑ่ดฅ: {e}"})
840
+
841
+
842
# === API Key ็ฎก็† ===

class AddKeyRequest(BaseModel):
    """Create a single API key with a display name."""
    name: str


class UpdateKeyNameRequest(BaseModel):
    """Rename an existing API key."""
    key: str
    name: str


class UpdateKeyStatusRequest(BaseModel):
    """Enable or disable a single API key."""
    key: str
    is_active: bool


class BatchAddKeyRequest(BaseModel):
    """Create `count` keys named from `name_prefix`."""
    name_prefix: str
    count: int


class BatchDeleteKeyRequest(BaseModel):
    """Delete every key listed in `keys`."""
    keys: List[str]


class BatchUpdateKeyStatusRequest(BaseModel):
    """Set the active flag on every key listed in `keys`."""
    keys: List[str]
    is_active: bool
870
+
871
+
872
@router.get("/api/keys")
async def list_keys(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """List every managed API key with a masked display form."""
    try:
        from app.services.api_keys import api_key_manager
        # Lazy-load the key store on first access.
        if not api_key_manager._loaded:
            await api_key_manager.init()

        # The optional global key is only reported as a boolean flag.
        global_key = setting.grok_config.get("api_key")

        # Mask each key for display: first 6 and last 4 characters only.
        masked = [
            {**entry, "display_key": f"{entry['key'][:6]}...{entry['key'][-4:]}"}
            for entry in api_key_manager.get_all_keys()
        ]

        return {
            "success": True,
            "data": masked,
            "global_key_set": bool(global_key)
        }
    except Exception as e:
        logger.error(f"[Admin] ่Žทๅ–Keyๅˆ—่กจๅคฑ่ดฅ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"่Žทๅ–ๅคฑ่ดฅ: {e}"})
901
+
902
+
903
@router.post("/api/keys/add")
async def add_key(request: AddKeyRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Create one API key with the requested display name."""
    try:
        from app.services.api_keys import api_key_manager
        created = await api_key_manager.add_key(request.name)
        return {"success": True, "data": created, "message": "Keyๅˆ›ๅปบๆˆๅŠŸ"}
    except Exception as e:
        logger.error(f"[Admin] ๆทปๅŠ Keyๅคฑ่ดฅ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆทปๅŠ ๅคฑ่ดฅ: {e}"})
913
+
914
+
915
@router.post("/api/keys/delete")
async def delete_key(request: Dict[str, str], _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Delete a single API key.

    A missing/empty `key` in the payload is a client error and returns 400.
    (Previously a ValueError was raised and swallowed by the generic handler,
    surfacing as a misleading 500.)
    """
    try:
        from app.services.api_keys import api_key_manager
        key = request.get("key")
        if not key:
            raise HTTPException(status_code=400, detail={"error": "Key cannot be empty"})

        if await api_key_manager.delete_key(key):
            return {"success": True, "message": "Keyๅˆ ้™คๆˆๅŠŸ"}
        return {"success": False, "message": "Keyไธๅญ˜ๅœจ"}
    except HTTPException:
        # Re-raise deliberate HTTP errors so they keep their status code.
        raise
    except Exception as e:
        logger.error(f"[Admin] ๅˆ ้™คKeyๅคฑ่ดฅ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๅˆ ้™คๅคฑ่ดฅ: {e}"})
930
+
931
+
932
@router.post("/api/keys/status")
async def update_key_status(request: UpdateKeyStatusRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Enable or disable one API key."""
    try:
        from app.services.api_keys import api_key_manager
        updated = await api_key_manager.update_key_status(request.key, request.is_active)
        if updated:
            return {"success": True, "message": "็Šถๆ€ๆ›ดๆ–ฐๆˆๅŠŸ"}
        return {"success": False, "message": "Keyไธๅญ˜ๅœจ"}
    except Exception as e:
        logger.error(f"[Admin] ๆ›ดๆ–ฐKey็Šถๆ€ๅคฑ่ดฅ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆ›ดๆ–ฐๅคฑ่ดฅ: {e}"})
943
+
944
+
945
@router.post("/api/keys/name")
async def update_key_name(request: UpdateKeyNameRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Rename one API key."""
    try:
        from app.services.api_keys import api_key_manager
        renamed = await api_key_manager.update_key_name(request.key, request.name)
        if renamed:
            return {"success": True, "message": "ๅค‡ๆณจๆ›ดๆ–ฐๆˆๅŠŸ"}
        return {"success": False, "message": "Keyไธๅญ˜ๅœจ"}
    except Exception as e:
        logger.error(f"[Admin] ๆ›ดๆ–ฐKeyๅค‡ๆณจๅคฑ่ดฅ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆ›ดๆ–ฐๅคฑ่ดฅ: {e}"})
956
+
957
+
958
@router.post("/api/keys/batch-add")
async def batch_add_keys(request: BatchAddKeyRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Create several API keys named from a common prefix."""
    try:
        from app.services.api_keys import api_key_manager
        created = await api_key_manager.batch_add_keys(request.name_prefix, request.count)
        return {"success": True, "data": created, "message": f"ๆˆๅŠŸๅˆ›ๅปบ {len(created)} ไธช Key"}
    except Exception as e:
        logger.error(f"[Admin] ๆ‰น้‡ๆทปๅŠ Keyๅคฑ่ดฅ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆ‰น้‡ๆทปๅŠ ๅคฑ่ดฅ: {e}"})
968
+
969
+
970
@router.post("/api/keys/batch-delete")
async def batch_delete_keys(request: BatchDeleteKeyRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Delete every key in the request in one pass."""
    try:
        from app.services.api_keys import api_key_manager
        removed = await api_key_manager.batch_delete_keys(request.keys)
        return {"success": True, "message": f"ๆˆๅŠŸๅˆ ้™ค {removed} ไธช Key"}
    except Exception as e:
        logger.error(f"[Admin] ๆ‰น้‡ๅˆ ้™คKeyๅคฑ่ดฅ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆ‰น้‡ๅˆ ้™คๅคฑ่ดฅ: {e}"})
980
+
981
+
982
@router.post("/api/keys/batch-status")
async def batch_update_key_status(request: BatchUpdateKeyStatusRequest, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Set the active flag on every key in the request."""
    try:
        from app.services.api_keys import api_key_manager
        changed = await api_key_manager.batch_update_keys_status(request.keys, request.is_active)
        return {"success": True, "message": f"ๆˆๅŠŸๆ›ดๆ–ฐ {changed} ไธช Key ็Šถๆ€"}
    except Exception as e:
        logger.error(f"[Admin] ๆ‰น้‡ๆ›ดๆ–ฐKey็Šถๆ€ๅคฑ่ดฅ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆ‰น้‡ๆ›ดๆ–ฐๅคฑ่ดฅ: {e}"})
992
+
993
+
994
+ # === ๆ—ฅๅฟ—ๅฎก่ฎก ===
995
+
996
@router.get("/api/logs")
async def get_logs(limit: int = 1000, _: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Return the most recent request-audit log entries (up to `limit`)."""
    try:
        from app.services.request_logger import request_logger
        entries = await request_logger.get_logs(limit)
        return {"success": True, "data": entries}
    except Exception as e:
        logger.error(f"[Admin] ่Žทๅ–ๆ—ฅๅฟ—ๅคฑ่ดฅ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"่Žทๅ–ๅคฑ่ดฅ: {e}"})
1006
+
1007
@router.post("/api/logs/clear")
async def clear_logs(_: bool = Depends(verify_admin_session)) -> Dict[str, Any]:
    """Erase the request-audit log."""
    try:
        from app.services.request_logger import request_logger
        await request_logger.clear_logs()
        return {"success": True, "message": "ๆ—ฅๅฟ—ๅทฒๆธ…็ฉบ"}
    except Exception as e:
        logger.error(f"[Admin] ๆธ…็ฉบๆ—ฅๅฟ—ๅคฑ่ดฅ: {e}")
        raise HTTPException(status_code=500, detail={"error": f"ๆธ…็ฉบๅคฑ่ดฅ: {e}"})
1017
+
app/api/v1/chat.py ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """่ŠๅคฉAPI่ทฏ็”ฑ - OpenAIๅ…ผๅฎน็š„่ŠๅคฉๆŽฅๅฃ"""
2
+
3
+ import time
4
+ from fastapi import APIRouter, Depends, HTTPException, Request
5
+ from typing import Optional, Dict, Any
6
+ from fastapi.responses import StreamingResponse
7
+
8
+ from app.core.auth import auth_manager
9
+ from app.core.exception import GrokApiException
10
+ from app.core.logger import logger
11
+ from app.services.grok.client import GrokClient
12
+ from app.models.openai_schema import OpenAIChatRequest
13
+ from app.services.request_stats import request_stats
14
+ from app.services.request_logger import request_logger
15
+
16
+
17
+ router = APIRouter(prefix="/chat", tags=["่Šๅคฉ"])
18
+
19
+
20
@router.post("/completions", response_model=None)
async def chat_completions(
    request: Request,
    body: OpenAIChatRequest,
    auth_info: Dict[str, Any] = Depends(auth_manager.verify)
):
    """Create a chat completion (streaming and non-streaming).

    Proxies the OpenAI-style request to Grok, records per-model request
    statistics, and writes an audit log entry with the final status.

    Fixes over the previous version:
    - ``request.client`` can be ``None`` under some ASGI servers/test
      clients; guard it instead of crashing with AttributeError.
    - The streaming audit log used to record 200 unconditionally, even
      when the stream raised mid-transfer; it now records the real outcome.
    """
    start_time = time.time()
    model = body.model
    ip = request.client.host if request.client else "unknown"
    key_name = auth_info.get("name", "Unknown")

    status_code = 200
    error_msg = ""

    try:
        logger.info(f"[Chat] ๆ”ถๅˆฐ่Šๅคฉ่ฏทๆฑ‚: {key_name} @ {ip}")

        # Dispatch to the Grok client (returns a dict or an async iterator).
        result = await GrokClient.openai_to_grok(body.model_dump())

        # Record success stats. For streams this marks successful dispatch;
        # the stream body itself may still fail below.
        await request_stats.record_request(model, success=True)

        if body.stream:
            async def stream_wrapper():
                # Log the actual stream outcome, not a blanket 200.
                stream_status = 200
                try:
                    async for chunk in result:
                        yield chunk
                except Exception:
                    stream_status = 500
                    raise
                finally:
                    duration = time.time() - start_time
                    await request_logger.add_log(ip, model, duration, stream_status, key_name)

            return StreamingResponse(
                content=stream_wrapper(),
                media_type="text/event-stream",
                headers={
                    "Cache-Control": "no-cache",
                    "Connection": "keep-alive",
                    "X-Accel-Buffering": "no"
                }
            )

        # Non-streaming: log and return the completed response.
        duration = time.time() - start_time
        await request_logger.add_log(ip, model, duration, 200, key_name)
        return result

    except GrokApiException as e:
        status_code = e.status_code or 500
        error_msg = str(e)
        await request_stats.record_request(model, success=False)
        logger.error(f"[Chat] Grok API้”™่ฏฏ: {e} - ่ฏฆๆƒ…: {e.details}")

        duration = time.time() - start_time
        await request_logger.add_log(ip, model, duration, status_code, key_name, error=error_msg)

        raise HTTPException(
            status_code=status_code,
            detail={
                "error": {
                    "message": error_msg,
                    "type": e.error_code or "grok_api_error",
                    "code": e.error_code or "unknown"
                }
            }
        )
    except Exception as e:
        status_code = 500
        error_msg = str(e)
        await request_stats.record_request(model, success=False)
        logger.error(f"[Chat] ๅค„็†ๅคฑ่ดฅ: {e}")

        duration = time.time() - start_time
        await request_logger.add_log(ip, model, duration, status_code, key_name, error=error_msg)

        raise HTTPException(
            status_code=500,
            detail={
                "error": {
                    "message": "ๆœๅŠกๅ™จๅ†…้ƒจ้”™่ฏฏ",
                    "type": "internal_error",
                    "code": "internal_server_error"
                }
            }
        )
app/api/v1/images.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ๅ›พ็‰‡ๆœๅŠกAPI - ๆไพ›็ผ“ๅญ˜็š„ๅ›พ็‰‡ๅ’Œ่ง†้ข‘ๆ–‡ไปถ"""
2
+
3
+ from fastapi import APIRouter, HTTPException
4
+ from fastapi.responses import FileResponse
5
+
6
+ from app.core.logger import logger
7
+ from app.services.grok.cache import image_cache_service, video_cache_service
8
+
9
+
10
+ router = APIRouter()
11
+
12
+
13
@router.get("/images/{img_path:path}")
async def get_image(img_path: str):
    """Serve a cached image or video file.

    Args:
        img_path: Flattened path where dashes stand in for slashes,
            e.g. ``users-xxx-generated-xxx-image.jpg``.
    """
    try:
        # Undo the dash-flattening applied when the URL was generated.
        # NOTE(review): this replaces *every* dash — assumes upstream paths
        # never contain literal dashes; confirm against the URL generator.
        original_path = "/" + img_path.replace('-', '/')

        # Pick the cache and media type from the file extension.
        video_exts = ('.mp4', '.webm', '.mov', '.avi')
        if original_path.lower().endswith(video_exts):
            cache_path = video_cache_service.get_cached(original_path)
            media_type = "video/mp4"
        else:
            cache_path = image_cache_service.get_cached(original_path)
            media_type = "image/jpeg"

        if cache_path and cache_path.exists():
            logger.debug(f"[MediaAPI] ่ฟ”ๅ›ž็ผ“ๅญ˜: {cache_path}")
            return FileResponse(
                path=str(cache_path),
                media_type=media_type,
                headers={
                    "Cache-Control": "public, max-age=86400",
                    "Access-Control-Allow-Origin": "*"
                }
            )

        logger.warning(f"[MediaAPI] ๆœชๆ‰พๅˆฐ: {original_path}")
        raise HTTPException(status_code=404, detail="File not found")

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"[MediaAPI] ่Žทๅ–ๅคฑ่ดฅ: {e}")
        raise HTTPException(status_code=500, detail=str(e))
app/api/v1/models.py ADDED
@@ -0,0 +1,114 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ๆจกๅž‹ๆŽฅๅฃ - OpenAIๅ…ผๅฎน็š„ๆจกๅž‹ๅˆ—่กจ็ซฏ็‚น"""
2
+
3
+ import time
4
+ from typing import Dict, Any, List, Optional
5
+ from fastapi import APIRouter, HTTPException, Depends
6
+
7
+ from app.models.grok_models import Models
8
+ from app.core.auth import auth_manager
9
+ from app.core.logger import logger
10
+
11
+
12
+ router = APIRouter(tags=["ๆจกๅž‹"])
13
+
14
+
15
@router.get("/models")
async def list_models(_: Optional[str] = Depends(auth_manager.verify)) -> Dict[str, Any]:
    """List every available model in OpenAI-compatible format."""
    try:
        logger.debug("[Models] ่ฏทๆฑ‚ๆจกๅž‹ๅˆ—่กจ")
        created = int(time.time())

        def describe(mid: str) -> Dict[str, Any]:
            """Build one OpenAI-style model entry from the model config."""
            cfg = Models.get_model_info(mid)
            return {
                "id": mid,
                "object": "model",
                "created": created,
                "owned_by": "x-ai",
                "display_name": cfg.get("display_name", mid),
                "description": cfg.get("description", ""),
                "raw_model_path": cfg.get("raw_model_path", f"xai/{mid}"),
                "default_temperature": cfg.get("default_temperature", 1.0),
                "default_max_output_tokens": cfg.get("default_max_output_tokens", 8192),
                "supported_max_output_tokens": cfg.get("supported_max_output_tokens", 131072),
                "default_top_p": cfg.get("default_top_p", 0.95),
            }

        payload = [describe(m.value) for m in Models]
        logger.debug(f"[Models] ่ฟ”ๅ›ž {len(payload)} ไธชๆจกๅž‹")
        return {"object": "list", "data": payload}

    except Exception as e:
        logger.error(f"[Models] ่Žทๅ–ๅˆ—่กจๅคฑ่ดฅ: {e}")
        raise HTTPException(
            status_code=500,
            detail={
                "error": {
                    "message": f"Failed to retrieve models: {e}",
                    "type": "internal_error",
                    "code": "model_list_error"
                }
            }
        )
59
+
60
+
61
@router.get("/models/{model_id}")
async def get_model(model_id: str, _: Optional[str] = Depends(auth_manager.verify)) -> Dict[str, Any]:
    """Return one model's OpenAI-compatible descriptor, or 404 if unknown."""
    try:
        logger.debug(f"[Models] ่ฏทๆฑ‚ๆจกๅž‹: {model_id}")

        # Reject unknown model ids early with an OpenAI-shaped 404.
        if not Models.is_valid_model(model_id):
            logger.warning(f"[Models] ๆจกๅž‹ไธๅญ˜ๅœจ: {model_id}")
            raise HTTPException(
                status_code=404,
                detail={
                    "error": {
                        "message": f"Model '{model_id}' not found",
                        "type": "invalid_request_error",
                        "code": "model_not_found"
                    }
                }
            )

        cfg = Models.get_model_info(model_id)
        descriptor = {
            "id": model_id,
            "object": "model",
            "created": int(time.time()),
            "owned_by": "x-ai",
            "display_name": cfg.get("display_name", model_id),
            "description": cfg.get("description", ""),
            "raw_model_path": cfg.get("raw_model_path", f"xai/{model_id}"),
            "default_temperature": cfg.get("default_temperature", 1.0),
            "default_max_output_tokens": cfg.get("default_max_output_tokens", 8192),
            "supported_max_output_tokens": cfg.get("supported_max_output_tokens", 131072),
            "default_top_p": cfg.get("default_top_p", 0.95),
        }

        logger.debug(f"[Models] ่ฟ”ๅ›žๆจกๅž‹: {model_id}")
        return descriptor

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"[Models] ่Žทๅ–ๆจกๅž‹ๅคฑ่ดฅ: {e}")
        raise HTTPException(
            status_code=500,
            detail={
                "error": {
                    "message": f"Failed to retrieve model: {e}",
                    "type": "internal_error",
                    "code": "model_retrieve_error"
                }
            }
        )
app/core/auth.py ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """่ฎค่ฏๆจกๅ— - APIไปค็‰Œ้ชŒ่ฏ"""
2
+
3
+ from typing import Optional, Dict
4
+ from fastapi import Depends, HTTPException
5
+ from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
6
+
7
+ from app.core.config import setting
8
+ from app.core.logger import logger
9
+ from app.services.api_keys import api_key_manager
10
+
11
+
12
+ # Bearerๅฎ‰ๅ…จๆ–นๆกˆ
13
+ security = HTTPBearer(auto_error=False)
14
+
15
+
16
+ def _build_error(message: str, code: str = "invalid_token") -> dict:
17
+ """ๆž„ๅปบ่ฎค่ฏ้”™่ฏฏ"""
18
+ return {
19
+ "error": {
20
+ "message": message,
21
+ "type": "authentication_error",
22
+ "code": code
23
+ }
24
+ }
25
+
26
+
27
class AuthManager:
    """Verifies bearer tokens for API requests."""

    @staticmethod
    async def verify(credentials: Optional[HTTPAuthorizationCredentials] = Depends(security)) -> Dict:
        """Validate the bearer token and return the matching key's info dict."""
        api_key = setting.grok_config.get("api_key")

        # Lazily initialise the key store on first use.
        # NOTE(review): this checks `_keys` while other call sites check
        # `_loaded` — confirm which attribute really marks initialisation.
        if not hasattr(api_key_manager, '_keys'):
            await api_key_manager.init()

        if not credentials:
            # Dev mode: no global key and no managed keys means auth is off.
            if not api_key and not api_key_manager.get_all_keys():
                logger.debug("[Auth] ๆœช่ฎพ็ฝฎAPI_KEY๏ผŒ่ทณ่ฟ‡้ชŒ่ฏ")
                return {"key": None, "name": "Anonymous"}

            raise HTTPException(
                status_code=401,
                detail=_build_error("็ผบๅฐ‘่ฎค่ฏไปค็‰Œ", "missing_token")
            )

        token = credentials.credentials

        # Look the token up in the multi-key store.
        key_info = api_key_manager.validate_key(token)
        if key_info:
            return key_info

        raise HTTPException(
            status_code=401,
            detail=_build_error(f"ไปค็‰Œๆ— ๆ•ˆ๏ผŒ้•ฟๅบฆ: {len(token)}", "invalid_token")
        )


# Module-wide singleton used by route dependencies
auth_manager = AuthManager()
app/core/config.py ADDED
@@ -0,0 +1,203 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """้…็ฝฎ็ฎก็†ๅ™จ - ็ฎก็†ๅบ”็”จ้…็ฝฎ็š„่ฏปๅ†™"""
2
+
3
+ import toml
4
+ from pathlib import Path
5
+ from typing import Dict, Any, Optional, Literal
6
+
7
+
8
+ # ้ป˜่ฎค้…็ฝฎ
9
+ DEFAULT_GROK = {
10
+ "api_key": "",
11
+ "proxy_url": "",
12
+ "proxy_pool_url": "",
13
+ "proxy_pool_interval": 300,
14
+ "cache_proxy_url": "",
15
+ "cf_clearance": "",
16
+ "x_statsig_id": "",
17
+ "dynamic_statsig": True,
18
+ "filtered_tags": "xaiartifact,xai:tool_usage_card",
19
+ "show_thinking": True,
20
+ "temporary": False,
21
+ "max_upload_concurrency": 20,
22
+ "max_request_concurrency": 100,
23
+ "stream_first_response_timeout": 30,
24
+ "stream_chunk_timeout": 120,
25
+ "stream_total_timeout": 600,
26
+ "retry_status_codes": [401, 429], # ๅฏ้‡่ฏ•็š„HTTP็Šถๆ€็ 
27
+ }
28
+
29
+ DEFAULT_GLOBAL = {
30
+ "base_url": "http://localhost:8000",
31
+ "log_level": "INFO",
32
+ "image_mode": "url",
33
+ "admin_password": "admin",
34
+ "admin_username": "admin",
35
+ "image_cache_max_size_mb": 512,
36
+ "video_cache_max_size_mb": 1024,
37
+ "max_upload_concurrency": 20, # ๆœ€ๅคงๅนถๅ‘ไธŠไผ ๆ•ฐ
38
+ "max_request_concurrency": 50, # ๆœ€ๅคงๅนถๅ‘่ฏทๆฑ‚ๆ•ฐ
39
+ "batch_save_interval": 1.0, # ๆ‰น้‡ไฟๅญ˜้—ด้š”๏ผˆ็ง’๏ผ‰
40
+ "batch_save_threshold": 10 # ่งฆๅ‘ๆ‰น้‡ไฟๅญ˜็š„ๅ˜ๆ›ดๆ•ฐ้˜ˆๅ€ผ
41
+ }
42
+
43
+
44
class ConfigManager:
    """Loads, normalizes and persists the application's TOML configuration."""

    def __init__(self) -> None:
        """Locate (and create, if missing) the config file, then load both sections."""
        self.config_path: Path = Path(__file__).parents[2] / "data" / "setting.toml"
        self._storage: Optional[Any] = None
        self._ensure_exists()
        self.global_config: Dict[str, Any] = self.load("global")
        self.grok_config: Dict[str, Any] = self.load("grok")

    def _ensure_exists(self) -> None:
        """Create the config file (and parent directories) when absent."""
        if self.config_path.exists():
            return
        self.config_path.parent.mkdir(parents=True, exist_ok=True)
        self._create_default()

    def _create_default(self) -> None:
        """Write a fresh config file populated with the default sections."""
        defaults = {"grok": DEFAULT_GROK.copy(), "global": DEFAULT_GLOBAL.copy()}
        with open(self.config_path, "w", encoding="utf-8") as fh:
            toml.dump(defaults, fh)

    def _normalize_proxy(self, proxy: str) -> str:
        """Canonicalize proxy URLs: sock5/socks5 schemes become socks5h://."""
        if not proxy:
            return proxy

        proxy = proxy.strip()
        # Fix the common "sock5" typo first, then force remote DNS (socks5h).
        if proxy.startswith("sock5h://"):
            proxy = "socks5h://" + proxy[len("sock5h://"):]
        if proxy.startswith("sock5://"):
            proxy = "socks5://" + proxy[len("sock5://"):]
        if proxy.startswith("socks5://"):
            return "socks5h://" + proxy[len("socks5://"):]
        return proxy

    def _normalize_cf(self, cf: str) -> str:
        """Ensure the CF clearance value carries the cookie-name prefix."""
        if cf and not cf.startswith("cf_clearance="):
            return f"cf_clearance={cf}"
        return cf

    def set_storage(self, storage: Any) -> None:
        """Attach an external storage backend used for persistence."""
        self._storage = storage

    def load(self, section: Literal["global", "grok"]) -> Dict[str, Any]:
        """Read one section from the TOML file, normalizing grok fields."""
        try:
            with open(self.config_path, "r", encoding="utf-8") as fh:
                data = toml.load(fh)[section]

            if section == "grok":
                for field in ("proxy_url", "cache_proxy_url"):
                    if field in data:
                        data[field] = self._normalize_proxy(data[field])
                if "cf_clearance" in data:
                    data["cf_clearance"] = self._normalize_cf(data["cf_clearance"])

            return data
        except Exception as e:
            raise Exception(f"[Setting] ้…็ฝฎๅŠ ่ฝฝๅคฑ่ดฅ: {e}") from e

    async def reload(self) -> None:
        """Re-read both sections from disk."""
        self.global_config = self.load("global")
        self.grok_config = self.load("grok")

    async def _save_file(self, updates: Dict[str, Dict[str, Any]]) -> None:
        """Merge the updates into the TOML file on disk."""
        import aiofiles

        async with aiofiles.open(self.config_path, "r", encoding="utf-8") as fh:
            current = toml.loads(await fh.read())

        for section, data in updates.items():
            if section in current:
                current[section].update(data)

        async with aiofiles.open(self.config_path, "w", encoding="utf-8") as fh:
            await fh.write(toml.dumps(current))

    async def _save_storage(self, updates: Dict[str, Dict[str, Any]]) -> None:
        """Merge the updates into the attached storage backend."""
        current = await self._storage.load_config()

        for section, data in updates.items():
            if section in current:
                current[section].update(data)

        await self._storage.save_config(current)

    def _prepare_grok(self, grok: Dict[str, Any]) -> Dict[str, Any]:
        """Strip the cookie-name prefix from cf_clearance before persisting."""
        prepared = grok.copy()
        cf = prepared.get("cf_clearance")
        if "cf_clearance" in prepared and cf and cf.startswith("cf_clearance="):
            prepared["cf_clearance"] = cf[len("cf_clearance="):]
        return prepared

    async def save(self, global_config: Optional[Dict[str, Any]] = None, grok_config: Optional[Dict[str, Any]] = None) -> None:
        """Persist the given section updates, then reload from the source of truth."""
        updates = {}
        if global_config:
            updates["global"] = global_config
        if grok_config:
            updates["grok"] = self._prepare_grok(grok_config)

        # Prefer the storage backend when one is attached.
        if self._storage:
            await self._save_storage(updates)
        else:
            await self._save_file(updates)

        await self.reload()

    async def get_proxy_async(self, proxy_type: Literal["service", "cache"] = "service") -> str:
        """Asynchronously resolve a proxy URL (proxy-pool aware).

        Args:
            proxy_type: "service" for client/upload traffic, "cache" for
                cache downloads (which may use a dedicated proxy).
        """
        from app.core.proxy_pool import proxy_pool

        if proxy_type == "cache":
            dedicated = self.grok_config.get("cache_proxy_url", "")
            if dedicated:
                return dedicated

        # Fall back to the proxy pool.
        return await proxy_pool.get_proxy() or ""

    def get_proxy(self, proxy_type: Literal["service", "cache"] = "service") -> str:
        """Synchronously resolve a proxy URL (kept for backward compatibility).

        Args:
            proxy_type: "service" for client/upload traffic, "cache" for
                cache downloads (which may use a dedicated proxy).
        """
        from app.core.proxy_pool import proxy_pool

        if proxy_type == "cache":
            dedicated = self.grok_config.get("cache_proxy_url", "")
            if dedicated:
                return dedicated

        # When a proxy pool is in use this returns the last proxy it handed out.
        return proxy_pool.get_current_proxy() or self.grok_config.get("proxy_url", "")
200
+
201
+
202
+ # ๅ…จๅฑ€ๅฎžไพ‹
203
+ setting = ConfigManager()
app/core/exception.py ADDED
@@ -0,0 +1,119 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ๅผ‚ๅธธๅค„็†ๅ™จ - OpenAIๅ…ผๅฎน็š„้”™่ฏฏๅ“ๅบ”"""
2
+
3
+ from fastapi import Request, status
4
+ from fastapi.responses import JSONResponse
5
+ from fastapi.exceptions import RequestValidationError
6
+ from starlette.exceptions import HTTPException as StarletteHTTPException
7
+
8
+
9
# Map of HTTP status code -> (OpenAI-style error type, default message).
HTTP_ERROR_MAP = {
    400: ("invalid_request_error", "่ฏทๆฑ‚ๆ ผๅผ้”™่ฏฏๆˆ–็ผบๅฐ‘ๅฟ…ๅกซๅ‚ๆ•ฐ"),
    401: ("invalid_request_error", "ไปค็‰Œ่ฎค่ฏๅคฑ่ดฅ"),
    403: ("permission_error", "ๆฒกๆœ‰ๆƒ้™่ฎฟ้—ฎๆญค่ต„ๆบ"),
    404: ("invalid_request_error", "่ฏทๆฑ‚็š„่ต„ๆบไธๅญ˜ๅœจ"),
    429: ("rate_limit_error", "่ฏทๆฑ‚้ข‘็އ่ถ…ๅ‡บ้™ๅˆถ๏ผŒ่ฏท็จๅŽๅ†่ฏ•"),
    500: ("api_error", "ๅ†…้ƒจๆœๅŠกๅ™จ้”™่ฏฏ"),
    503: ("api_error", "ๆœๅŠกๆš‚ๆ—ถไธๅฏ็”จ"),
}

# Map of internal Grok error code -> HTTP status returned to the client.
GROK_STATUS_MAP = {
    "NO_AUTH_TOKEN": status.HTTP_401_UNAUTHORIZED,
    "INVALID_TOKEN": status.HTTP_401_UNAUTHORIZED,
    "HTTP_ERROR": status.HTTP_502_BAD_GATEWAY,
    "NETWORK_ERROR": status.HTTP_503_SERVICE_UNAVAILABLE,
    "JSON_ERROR": status.HTTP_502_BAD_GATEWAY,
    "API_ERROR": status.HTTP_502_BAD_GATEWAY,
    "STREAM_ERROR": status.HTTP_502_BAD_GATEWAY,
    "NO_RESPONSE": status.HTTP_502_BAD_GATEWAY,
    "TOKEN_SAVE_ERROR": status.HTTP_500_INTERNAL_SERVER_ERROR,
    "NO_AVAILABLE_TOKEN": status.HTTP_503_SERVICE_UNAVAILABLE,
}

# Map of internal Grok error code -> OpenAI-style error "type" string.
GROK_TYPE_MAP = {
    "NO_AUTH_TOKEN": "authentication_error",
    "INVALID_TOKEN": "authentication_error",
    "HTTP_ERROR": "api_error",
    "NETWORK_ERROR": "api_error",
    "JSON_ERROR": "api_error",
    "API_ERROR": "api_error",
    "STREAM_ERROR": "api_error",
    "NO_RESPONSE": "api_error",
    "TOKEN_SAVE_ERROR": "api_error",
    "NO_AVAILABLE_TOKEN": "api_error",
}
46
+
47
+
48
class GrokApiException(Exception):
    """Business-level exception for Grok API failures.

    Carries a machine-readable ``error_code`` (keying GROK_STATUS_MAP and
    GROK_TYPE_MAP) plus free-form ``details``/``context`` payloads.
    """

    def __init__(self, message: str, error_code: str | None = None, details: dict | None = None, context: dict | None = None, status_code: int | None = None):
        self.message = message            # human-readable description
        self.error_code = error_code      # e.g. "INVALID_TOKEN"; may be None
        self.details = details or {}      # structured error details
        self.context = context or {}      # request/call context for logging
        # An explicit status wins; otherwise derive it from the error code.
        # Unknown codes leave this as None — handlers fall back to 500.
        self.status_code = status_code or GROK_STATUS_MAP.get(error_code)
        super().__init__(self.message)
58
+
59
+
60
def build_error_response(message: str, error_type: str, code: str = None, param: str = None) -> dict:
    """Build an OpenAI-compatible error payload.

    Args:
        message: Human-readable error message.
        error_type: OpenAI-style error type (e.g. "api_error").
        code: Optional machine-readable error code; omitted when falsy.
        param: Optional offending parameter name; omitted when falsy.

    Returns:
        ``{"error": {"message", "type"[, "code"][, "param"]}}``
    """
    payload = {"message": message, "type": error_type}

    # Optional fields are only emitted when they carry a truthy value.
    for key, value in (("code", code), ("param", param)):
        if value:
            payload[key] = value

    return {"error": payload}
70
+
71
+
72
async def http_exception_handler(_: Request, exc: StarletteHTTPException) -> JSONResponse:
    """Translate an HTTP exception into an OpenAI-style error response."""
    # Look up the error type and a default message for this status code;
    # unknown codes fall back to a generic api_error with the raw detail.
    error_type, fallback = HTTP_ERROR_MAP.get(exc.status_code, ("api_error", str(exc.detail)))
    detail = str(exc.detail) if exc.detail else fallback

    body = build_error_response(detail, error_type)
    return JSONResponse(status_code=exc.status_code, content=body)
81
+
82
+
83
async def validation_exception_handler(_: Request, exc: RequestValidationError) -> JSONResponse:
    """Convert request-validation failures into 400 OpenAI-style responses.

    Only the first validation error is reported; its last location segment
    becomes the offending parameter name.
    """
    errors = exc.errors()
    first = errors[0] if errors else {}

    loc = first.get("loc")
    param = loc[-1] if loc else None
    message = first.get("msg") or "่ฏทๆฑ‚ๅ‚ๆ•ฐ้”™่ฏฏ"

    return JSONResponse(
        status_code=status.HTTP_400_BAD_REQUEST,
        content=build_error_response(message, "invalid_request_error", param=param),
    )
93
+
94
+
95
async def grok_api_exception_handler(_: Request, exc: GrokApiException) -> JSONResponse:
    """Render a GrokApiException as an OpenAI-style error response."""
    # Derive HTTP status and error type from the exception's error code;
    # unknown codes degrade to a generic 500 api_error.
    fallback_status = status.HTTP_500_INTERNAL_SERVER_ERROR
    http_status = GROK_STATUS_MAP.get(exc.error_code, fallback_status)
    err_type = GROK_TYPE_MAP.get(exc.error_code, "api_error")

    body = build_error_response(exc.message, err_type, exc.error_code)
    return JSONResponse(status_code=http_status, content=body)
104
+
105
+
106
async def global_exception_handler(_: Request, exc: Exception) -> JSONResponse:
    """Last-resort handler: mask unexpected exceptions behind a generic 500."""
    content = build_error_response("ๆœๅŠกๅ™จ้‡ๅˆฐๆ„ๅค–้”™่ฏฏ๏ผŒ่ฏท้‡่ฏ•", "api_error")
    return JSONResponse(
        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        content=content,
    )
112
+
113
+
114
def register_exception_handlers(app) -> None:
    """Attach all custom exception handlers to the FastAPI application.

    Registration order matters only for documentation purposes — FastAPI
    dispatches by exception type, with ``Exception`` as the catch-all.
    """
    handlers = (
        (StarletteHTTPException, http_exception_handler),
        (RequestValidationError, validation_exception_handler),
        (GrokApiException, grok_api_exception_handler),
        (Exception, global_exception_handler),
    )
    for exc_type, handler in handlers:
        app.add_exception_handler(exc_type, handler)
app/core/logger.py ADDED
@@ -0,0 +1,130 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ๅ…จๅฑ€ๆ—ฅๅฟ—ๆจกๅ— - ๅ•ไพ‹ๆจกๅผ็š„ๆ—ฅๅฟ—็ฎก็†ๅ™จ"""
2
+
3
+ import sys
4
+ import logging
5
+ from pathlib import Path
6
+ from logging.handlers import RotatingFileHandler
7
+
8
+ from app.core.config import setting
9
+
10
+
11
+ # ่ฟ‡ๆปคๆจกๅผ
12
+ FILTER_PATTERNS = [
13
+ "chunk: b'", # SSEๅŽŸๅง‹ๅญ—่Š‚
14
+ "Got event:", # SSEไบ‹ไปถ
15
+ "Closing", # SSEๅ…ณ้—ญ
16
+ ]
17
+
18
+
19
class MCPLogFilter(logging.Filter):
    """Suppresses high-volume DEBUG records from SSE/MCP libraries."""

    def filter(self, record: logging.LogRecord) -> bool:
        """Return True to keep the record, False to drop it."""
        if record.levelno == logging.DEBUG:
            # Drop sse_starlette chunk/event/close chatter matching a pattern.
            if record.name == "sse_starlette.sse":
                message = record.getMessage()
                return all(pattern not in message for pattern in FILTER_PATTERNS)

            # Drop all DEBUG output from MCP's streamable HTTP transport.
            if "mcp.server.streamable_http" in record.name:
                return False

        return True
34
+
35
+
36
class LoggerManager:
    """Application-wide logging manager (singleton).

    Configures the root logger once with a stdout handler and a rotating
    file handler, then exposes thin level-specific helper methods.
    """

    _instance = None      # singleton instance
    _initialized = False  # guards against re-running handler setup

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        """Initialize the logging system (idempotent)."""
        if LoggerManager._initialized:
            return

        # Configuration: logs/ directory at the project root, level from settings.
        log_dir = Path(__file__).parents[2] / "logs"
        log_dir.mkdir(exist_ok=True)
        log_level = setting.global_config.get("log_level", "INFO").upper()
        log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        log_file = log_dir / "app.log"

        # Root logger — all module loggers propagate here.
        self.logger = logging.getLogger()
        self.logger.setLevel(log_level)

        # Avoid stacking duplicate handlers (e.g. repeated instantiation).
        # NOTE(review): returning here leaves _initialized False, so the cheap
        # setup above re-runs on later instantiations — confirm intended.
        if self.logger.handlers:
            return

        # Shared formatter and noise filter.
        formatter = logging.Formatter(log_format)
        mcp_filter = MCPLogFilter()

        # Console handler (stdout).
        console = logging.StreamHandler(sys.stdout)
        console.setLevel(log_level)
        console.setFormatter(formatter)
        console.addFilter(mcp_filter)

        # Rotating file handler (10 MB per file, 5 backups).
        file_handler = RotatingFileHandler(
            log_file, maxBytes=10*1024*1024, backupCount=5, encoding="utf-8"
        )
        file_handler.setLevel(log_level)
        file_handler.setFormatter(formatter)
        file_handler.addFilter(mcp_filter)

        # Attach both handlers to the root logger.
        self.logger.addHandler(console)
        self.logger.addHandler(file_handler)

        # Quiet noisy third-party libraries.
        self._configure_third_party()

        LoggerManager._initialized = True

    def _configure_third_party(self):
        """Set per-library log levels for common third-party loggers."""
        config = {
            "asyncio": logging.WARNING,
            "uvicorn": logging.INFO,
            "fastapi": logging.INFO,
            "aiomysql": logging.WARNING,
            "mcp": logging.CRITICAL,
            "fastmcp": logging.CRITICAL,
        }

        for name, level in config.items():
            logging.getLogger(name).setLevel(level)

    def debug(self, msg: str) -> None:
        """Log at DEBUG level."""
        self.logger.debug(msg)

    def info(self, msg: str) -> None:
        """Log at INFO level."""
        self.logger.info(msg)

    def warning(self, msg: str) -> None:
        """Log at WARNING level."""
        self.logger.warning(msg)

    def error(self, msg: str) -> None:
        """Log at ERROR level."""
        self.logger.error(msg)

    def critical(self, msg: str) -> None:
        """Log at CRITICAL level."""
        self.logger.critical(msg)
127
+
128
+
129
+ # ๅ…จๅฑ€ๅฎžไพ‹
130
+ logger = LoggerManager()
app/core/proxy_pool.py ADDED
@@ -0,0 +1,170 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ไปฃ็†ๆฑ ็ฎก็†ๅ™จ - ไปŽURLๅŠจๆ€่Žทๅ–ไปฃ็†IP"""
2
+
3
+ import asyncio
4
+ import aiohttp
5
+ import time
6
+ from typing import Optional, List
7
+ from app.core.logger import logger
8
+
9
+
10
class ProxyPool:
    """Proxy pool manager.

    Supports either a static proxy URL or a pool API endpoint that returns a
    fresh proxy address, refreshed at a configurable interval. All fetches are
    serialized through an asyncio lock with double-checked refresh.
    """

    def __init__(self):
        self._pool_url: Optional[str] = None       # pool API endpoint (if any)
        self._static_proxy: Optional[str] = None   # normalized static proxy
        self._current_proxy: Optional[str] = None  # last proxy handed out
        self._last_fetch_time: float = 0           # monotonic-ish wall-clock stamp
        self._fetch_interval: int = 300  # refresh every 5 minutes by default
        self._enabled: bool = False                # True when a pool URL is set
        self._lock = asyncio.Lock()                # serializes pool fetches

    def configure(self, proxy_url: str, proxy_pool_url: str = "", proxy_pool_interval: int = 300):
        """Configure the proxy pool.

        Args:
            proxy_url: Static proxy URL (socks5h://xxx or http://xxx).
            proxy_pool_url: Pool API URL that returns a single proxy address.
            proxy_pool_interval: Pool refresh interval in seconds.
        """
        self._static_proxy = self._normalize_proxy(proxy_url) if proxy_url else None
        pool_url = proxy_pool_url.strip() if proxy_pool_url else None
        # Guard against a proxy ADDRESS being misconfigured as the pool API URL.
        if pool_url and self._looks_like_proxy_url(pool_url):
            normalized_proxy = self._normalize_proxy(pool_url)
            if not self._static_proxy:
                self._static_proxy = normalized_proxy
                logger.warning("[ProxyPool] proxy_pool_url็œ‹่ตทๆฅๆ˜ฏไปฃ็†ๅœฐๅ€๏ผŒๅทฒไฝœไธบ้™ๆ€ไปฃ็†ไฝฟ็”จ๏ผŒ่ฏทๆ”น็”จproxy_url")
            else:
                logger.warning("[ProxyPool] proxy_pool_url็œ‹่ตทๆฅๆ˜ฏไปฃ็†ๅœฐๅ€๏ผŒๅทฒๅฟฝ็•ฅ๏ผˆไฝฟ็”จproxy_url๏ผ‰")
            pool_url = None
        self._pool_url = pool_url
        self._fetch_interval = proxy_pool_interval
        self._enabled = bool(self._pool_url)

        if self._enabled:
            logger.info(f"[ProxyPool] ไปฃ็†ๆฑ ๅทฒๅฏ็”จ: {self._pool_url}, ๅˆทๆ–ฐ้—ด้š”: {self._fetch_interval}s")
        elif self._static_proxy:
            logger.info(f"[ProxyPool] ไฝฟ็”จ้™ๆ€ไปฃ็†: {self._static_proxy}")
            self._current_proxy = self._static_proxy
        else:
            logger.info("[ProxyPool] ๆœช้…็ฝฎไปฃ็†")

    async def get_proxy(self) -> Optional[str]:
        """Get a proxy address, refreshing from the pool when stale.

        Returns:
            Proxy URL or None.
        """
        # Without a pool, just hand back the static proxy (possibly None).
        if not self._enabled:
            return self._static_proxy

        # Refresh when we have no proxy yet or the refresh interval elapsed.
        now = time.time()
        if not self._current_proxy or (now - self._last_fetch_time) >= self._fetch_interval:
            async with self._lock:
                # Double-checked: another coroutine may have refreshed already.
                if not self._current_proxy or (now - self._last_fetch_time) >= self._fetch_interval:
                    await self._fetch_proxy()

        return self._current_proxy

    async def force_refresh(self) -> Optional[str]:
        """Force-refresh the proxy (used to retry after 403 errors).

        Returns:
            The new proxy URL or None.
        """
        if not self._enabled:
            return self._static_proxy

        async with self._lock:
            await self._fetch_proxy()

        return self._current_proxy

    async def _fetch_proxy(self):
        """Fetch a new proxy from the pool URL; degrade to the static proxy on failure."""
        try:
            logger.debug(f"[ProxyPool] ๆญฃๅœจไปŽไปฃ็†ๆฑ ่Žทๅ–ๆ–ฐไปฃ็†: {self._pool_url}")

            timeout = aiohttp.ClientTimeout(total=10)
            async with aiohttp.ClientSession(timeout=timeout) as session:
                async with session.get(self._pool_url) as response:
                    if response.status == 200:
                        proxy_text = await response.text()
                        proxy = self._normalize_proxy(proxy_text.strip())

                        # Only accept proxies with a supported URL scheme.
                        if self._validate_proxy(proxy):
                            self._current_proxy = proxy
                            self._last_fetch_time = time.time()
                            logger.info(f"[ProxyPool] ๆˆๅŠŸ่Žทๅ–ๆ–ฐไปฃ็†: {proxy}")
                        else:
                            logger.error(f"[ProxyPool] ไปฃ็†ๆ ผๅผๆ— ๆ•ˆ: {proxy}")
                            # Degrade to the static proxy only if nothing is set yet.
                            if not self._current_proxy:
                                self._current_proxy = self._static_proxy
                    else:
                        logger.error(f"[ProxyPool] ่Žทๅ–ไปฃ็†ๅคฑ่ดฅ: HTTP {response.status}")
                        # Degrade to the static proxy only if nothing is set yet.
                        if not self._current_proxy:
                            self._current_proxy = self._static_proxy

        except asyncio.TimeoutError:
            logger.error("[ProxyPool] ่Žทๅ–ไปฃ็†่ถ…ๆ—ถ")
            if not self._current_proxy:
                self._current_proxy = self._static_proxy

        except Exception as e:
            logger.error(f"[ProxyPool] ่Žทๅ–ไปฃ็†ๅผ‚ๅธธ: {e}")
            # Degrade to the static proxy only if nothing is set yet.
            if not self._current_proxy:
                self._current_proxy = self._static_proxy

    def _validate_proxy(self, proxy: str) -> bool:
        """Validate the proxy URL scheme.

        Args:
            proxy: Proxy URL.

        Returns:
            True when the URL uses a supported scheme.
        """
        if not proxy:
            return False

        # Supported proxy schemes.
        valid_protocols = ['http://', 'https://', 'socks5://', 'socks5h://']

        return any(proxy.startswith(proto) for proto in valid_protocols)

    def _normalize_proxy(self, proxy: str) -> str:
        """Normalize proxy URLs (sock5/socks5 variants -> socks5h://).

        socks5h resolves DNS through the proxy; typo'd "sock5" schemes are
        repaired as well.
        """
        if not proxy:
            return proxy

        proxy = proxy.strip()
        if proxy.startswith("sock5h://"):
            proxy = proxy.replace("sock5h://", "socks5h://", 1)
        if proxy.startswith("sock5://"):
            proxy = proxy.replace("sock5://", "socks5://", 1)
        if proxy.startswith("socks5://"):
            return proxy.replace("socks5://", "socks5h://", 1)
        return proxy

    def _looks_like_proxy_url(self, url: str) -> bool:
        """Heuristic: does this URL look like a proxy ADDRESS rather than a pool API?

        Only socks-style schemes are treated as proxy addresses; http(s) URLs
        are assumed to be pool API endpoints.
        """
        return url.startswith(("sock5://", "sock5h://", "socks5://", "socks5h://"))

    def get_current_proxy(self) -> Optional[str]:
        """Get the proxy currently in use (synchronous, no refresh).

        Returns:
            Current proxy URL or None.
        """
        return self._current_proxy or self._static_proxy
167
+
168
+
169
+ # ๅ…จๅฑ€ไปฃ็†ๆฑ ๅฎžไพ‹
170
+ proxy_pool = ProxyPool()
app/core/storage.py ADDED
@@ -0,0 +1,445 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ๅญ˜ๅ‚จๆŠฝ่ฑกๅฑ‚ - ๆ”ฏๆŒๆ–‡ไปถใ€MySQLๅ’ŒRedisๅญ˜ๅ‚จ"""
2
+
3
+ import os
4
+ import orjson
5
+ import toml
6
+ import asyncio
7
+ import warnings
8
+ import aiofiles
9
+ from pathlib import Path
10
+ from typing import Dict, Any, Optional, Literal
11
+ from abc import ABC, abstractmethod
12
+ from urllib.parse import urlparse, unquote
13
+
14
+ from app.core.logger import logger
15
+
16
+
17
+ StorageMode = Literal["file", "mysql", "redis"]
18
+
19
+
20
class BaseStorage(ABC):
    """Abstract storage backend for token and configuration persistence."""

    @abstractmethod
    async def init_db(self) -> None:
        """Initialize the backing store (files, tables, or connections)."""
        pass

    @abstractmethod
    async def load_tokens(self) -> Dict[str, Any]:
        """Load token data; shape is {"sso": {...}, "ssoSuper": {...}}."""
        pass

    @abstractmethod
    async def save_tokens(self, data: Dict[str, Any]) -> None:
        """Persist token data."""
        pass

    @abstractmethod
    async def load_config(self) -> Dict[str, Any]:
        """Load configuration data; shape is {"global": {...}, "grok": {...}}."""
        pass

    @abstractmethod
    async def save_config(self, data: Dict[str, Any]) -> None:
        """Persist configuration data."""
        pass
+ pass
47
+
48
+
49
class FileStorage(BaseStorage):
    """File-backed storage: tokens in JSON, settings in TOML.

    All reads/writes are serialized per file through asyncio locks.
    """

    def __init__(self, data_dir: Path):
        self.data_dir = data_dir                       # base data directory
        self.token_file = data_dir / "token.json"      # token store (JSON)
        self.config_file = data_dir / "setting.toml"   # settings store (TOML)
        self._token_lock = asyncio.Lock()              # serializes token I/O
        self._config_lock = asyncio.Lock()             # serializes config I/O

    async def init_db(self) -> None:
        """Create the data directory and seed default files when missing."""
        self.data_dir.mkdir(parents=True, exist_ok=True)

        if not self.token_file.exists():
            await self._write(self.token_file, orjson.dumps({"sso": {}, "ssoSuper": {}}, option=orjson.OPT_INDENT_2).decode())
            logger.info("[Storage] ๅˆ›ๅปบtokenๆ–‡ไปถ")

        if not self.config_file.exists():
            # Default credentials/settings written on first run.
            default = {
                "global": {"api_keys": [], "admin_username": "admin", "admin_password": "admin"},
                "grok": {"proxy_url": "", "cf_clearance": "", "x_statsig_id": ""}
            }
            await self._write(self.config_file, toml.dumps(default))
            logger.info("[Storage] ๅˆ›ๅปบ้…็ฝฎๆ–‡ไปถ")

    async def _read(self, path: Path) -> str:
        """Read a UTF-8 text file asynchronously."""
        async with aiofiles.open(path, "r", encoding="utf-8") as f:
            return await f.read()

    async def _write(self, path: Path, content: str) -> None:
        """Write a UTF-8 text file asynchronously (overwrites)."""
        async with aiofiles.open(path, "w", encoding="utf-8") as f:
            await f.write(content)

    async def _load_json(self, path: Path, default: Dict, lock: asyncio.Lock) -> Dict[str, Any]:
        """Load JSON under the given lock; return `default` on error/missing file."""
        try:
            async with lock:
                if not path.exists():
                    return default
                return orjson.loads(await self._read(path))
        except Exception as e:
            logger.error(f"[Storage] ๅŠ ่ฝฝ{path.name}ๅคฑ่ดฅ: {e}")
            return default

    async def _save_json(self, path: Path, data: Dict, lock: asyncio.Lock) -> None:
        """Save JSON under the given lock; re-raise after logging on failure."""
        try:
            async with lock:
                await self._write(path, orjson.dumps(data, option=orjson.OPT_INDENT_2).decode())
        except Exception as e:
            logger.error(f"[Storage] ไฟๅญ˜{path.name}ๅคฑ่ดฅ: {e}")
            raise

    async def _load_toml(self, path: Path, default: Dict, lock: asyncio.Lock) -> Dict[str, Any]:
        """Load TOML under the given lock; return `default` on error/missing file."""
        try:
            async with lock:
                if not path.exists():
                    return default
                return toml.loads(await self._read(path))
        except Exception as e:
            logger.error(f"[Storage] ๅŠ ่ฝฝ{path.name}ๅคฑ่ดฅ: {e}")
            return default

    async def _save_toml(self, path: Path, data: Dict, lock: asyncio.Lock) -> None:
        """Save TOML under the given lock; re-raise after logging on failure."""
        try:
            async with lock:
                await self._write(path, toml.dumps(data))
        except Exception as e:
            logger.error(f"[Storage] ไฟๅญ˜{path.name}ๅคฑ่ดฅ: {e}")
            raise

    async def load_tokens(self) -> Dict[str, Any]:
        """Load token data from token.json."""
        return await self._load_json(self.token_file, {"sso": {}, "ssoSuper": {}}, self._token_lock)

    async def save_tokens(self, data: Dict[str, Any]) -> None:
        """Persist token data to token.json."""
        await self._save_json(self.token_file, data, self._token_lock)

    async def load_config(self) -> Dict[str, Any]:
        """Load configuration from setting.toml."""
        return await self._load_toml(self.config_file, {"global": {}, "grok": {}}, self._config_lock)

    async def save_config(self, data: Dict[str, Any]) -> None:
        """Persist configuration to setting.toml."""
        await self._save_toml(self.config_file, data, self._config_lock)
+ await self._save_toml(self.config_file, data, self._config_lock)
140
+
141
+
142
class MysqlStorage(BaseStorage):
    """MySQL storage with a local-file write-through cache.

    Reads are always served from the FileStorage mirror; writes go to both
    the file and the database. `init_db` reconciles the two on startup.
    """

    def __init__(self, database_url: str, data_dir: Path):
        self.database_url = database_url      # mysql://user:pass@host:port/db
        self.data_dir = data_dir
        self._pool = None                     # aiomysql connection pool
        self._file = FileStorage(data_dir)    # local mirror used for reads

    async def init_db(self) -> None:
        """Connect to MySQL, create schema, then sync DB <-> local files.

        Raises:
            Exception: when aiomysql is missing or connection/setup fails.
        """
        try:
            import aiomysql
            parsed = self._parse_url(self.database_url)
            logger.info(f"[Storage] MySQL: {parsed['user']}@{parsed['host']}:{parsed['port']}/{parsed['db']}")

            await self._create_db(parsed)
            self._pool = await aiomysql.create_pool(
                host=parsed['host'], port=parsed['port'], user=parsed['user'],
                password=parsed['password'], db=parsed['db'], charset="utf8mb4",
                autocommit=True, maxsize=10
            )
            await self._create_tables()
            await self._file.init_db()
            await self._sync_data()

        except ImportError:
            raise Exception("aiomysqlๆœชๅฎ‰่ฃ…")
        except Exception as e:
            logger.error(f"[Storage] MySQLๅˆๅง‹ๅŒ–ๅคฑ่ดฅ: {e}")
            raise

    def _parse_url(self, url: str) -> Dict[str, Any]:
        """Parse the database URL into connection parameters.

        Credentials are percent-decoded; port defaults to 3306 and database
        name to "grok2api".
        """
        p = urlparse(url)
        return {
            'user': unquote(p.username) if p.username else "",
            'password': unquote(p.password) if p.password else "",
            'host': p.hostname,
            'port': p.port or 3306,
            'db': p.path[1:] if p.path else "grok2api"
        }

    async def _create_db(self, parsed: Dict) -> None:
        """Create the target database if absent, using a throwaway pool."""
        import aiomysql
        pool = await aiomysql.create_pool(
            host=parsed['host'], port=parsed['port'], user=parsed['user'],
            password=parsed['password'], charset="utf8mb4", autocommit=True, maxsize=1
        )

        try:
            async with pool.acquire() as conn:
                async with conn.cursor() as cursor:
                    # aiomysql surfaces "database exists" as a warning; silence it.
                    with warnings.catch_warnings():
                        warnings.filterwarnings('ignore', message='.*database exists')
                        await cursor.execute(
                            f"CREATE DATABASE IF NOT EXISTS `{parsed['db']}` "
                            f"CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci"
                        )
            logger.info(f"[Storage] ๆ•ฐๆฎๅบ“ '{parsed['db']}' ๅฐฑ็ปช")
        finally:
            pool.close()
            await pool.wait_closed()

    async def _create_tables(self) -> None:
        """Create the token and settings tables (single-row JSON documents)."""
        tables = {
            "grok_tokens": """
                CREATE TABLE IF NOT EXISTS grok_tokens (
                    id INT AUTO_INCREMENT PRIMARY KEY,
                    data JSON NOT NULL,
                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
            """,
            "grok_settings": """
                CREATE TABLE IF NOT EXISTS grok_settings (
                    id INT AUTO_INCREMENT PRIMARY KEY,
                    data JSON NOT NULL,
                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
            """
        }

        async with self._pool.acquire() as conn:
            async with conn.cursor() as cursor:
                with warnings.catch_warnings():
                    warnings.filterwarnings('ignore', message='.*already exists')
                    for sql in tables.values():
                        await cursor.execute(sql)
        logger.info("[Storage] MySQL่กจๅฐฑ็ปช")

    async def _sync_data(self) -> None:
        """Reconcile DB and local files.

        DB rows win when present (DB -> file); otherwise non-empty file data
        seeds the DB. Failures are logged but non-fatal.
        """
        try:
            for table, key in [("grok_tokens", "sso"), ("grok_settings", "global")]:
                data = await self._load_db(table)
                if data:
                    if table == "grok_tokens":
                        await self._file.save_tokens(data)
                    else:
                        await self._file.save_config(data)
                    logger.info(f"[Storage] {table.split('_')[1]}ๆ•ฐๆฎๅทฒไปŽDBๅŒๆญฅ")
                else:
                    file_data = await (self._file.load_tokens() if table == "grok_tokens" else self._file.load_config())
                    # Seed the DB only when the file actually holds data.
                    if file_data.get(key) or (table == "grok_tokens" and file_data.get("ssoSuper")):
                        await self._save_db(table, file_data)
                        logger.info(f"[Storage] {table.split('_')[1]}ๆ•ฐๆฎๅทฒๅˆๅง‹ๅŒ–ๅˆฐDB")
        except Exception as e:
            logger.warning(f"[Storage] ๅŒๆญฅๅคฑ่ดฅ: {e}")

    async def _load_db(self, table: str) -> Optional[Dict]:
        """Load the latest JSON document from `table`; None on error/empty."""
        try:
            async with self._pool.acquire() as conn:
                async with conn.cursor() as cursor:
                    await cursor.execute(f"SELECT data FROM {table} ORDER BY id DESC LIMIT 1")
                    result = await cursor.fetchone()
                    return orjson.loads(result[0]) if result else None
        except Exception as e:
            logger.error(f"[Storage] ๅŠ ่ฝฝ{table}ๅคฑ่ดฅ: {e}")
            return None

    async def _save_db(self, table: str, data: Dict) -> None:
        """Upsert the single JSON document row in `table`; re-raise on failure."""
        try:
            async with self._pool.acquire() as conn:
                async with conn.cursor() as cursor:
                    json_data = orjson.dumps(data).decode()
                    await cursor.execute(f"SELECT id FROM {table} ORDER BY id DESC LIMIT 1")
                    result = await cursor.fetchone()

                    # Update the existing row when present, else insert the first one.
                    if result:
                        await cursor.execute(f"UPDATE {table} SET data = %s WHERE id = %s", (json_data, result[0]))
                    else:
                        await cursor.execute(f"INSERT INTO {table} (data) VALUES (%s)", (json_data,))
        except Exception as e:
            logger.error(f"[Storage] ไฟๅญ˜{table}ๅคฑ่ดฅ: {e}")
            raise

    async def load_tokens(self) -> Dict[str, Any]:
        """Load token data from the local file mirror."""
        return await self._file.load_tokens()

    async def save_tokens(self, data: Dict[str, Any]) -> None:
        """Persist token data to the file mirror, then to MySQL."""
        await self._file.save_tokens(data)
        await self._save_db("grok_tokens", data)

    async def load_config(self) -> Dict[str, Any]:
        """Load configuration from the local file mirror."""
        return await self._file.load_config()

    async def save_config(self, data: Dict[str, Any]) -> None:
        """Persist configuration to the file mirror, then to MySQL."""
        await self._file.save_config(data)
        await self._save_db("grok_settings", data)

    async def close(self) -> None:
        """Close the MySQL connection pool."""
        if self._pool:
            self._pool.close()
            await self._pool.wait_closed()
            logger.info("[Storage] MySQLๅทฒๅ…ณ้—ญ")
+ logger.info("[Storage] MySQLๅทฒๅ…ณ้—ญ")
308
+
309
+
310
class RedisStorage(BaseStorage):
    """Redis storage with a local-file write-through cache.

    Reads are served from the FileStorage mirror; writes go to both the file
    and Redis. `init_db` reconciles the two on startup.
    """

    def __init__(self, redis_url: str, data_dir: Path):
        self.redis_url = redis_url            # redis://host:port/db
        self.data_dir = data_dir
        self._redis = None                    # redis.asyncio client
        self._file = FileStorage(data_dir)    # local mirror used for reads

    async def init_db(self) -> None:
        """Connect to Redis, verify with PING, then sync Redis <-> local files.

        Raises:
            Exception: when the redis package is missing or connection fails.
        """
        try:
            import redis.asyncio as aioredis
            parsed = urlparse(self.redis_url)
            # Database index comes from the URL path; default 0.
            db = int(parsed.path.lstrip('/')) if parsed.path and parsed.path != '/' else 0
            logger.info(f"[Storage] Redis: {parsed.hostname}:{parsed.port or 6379}/{db}")

            self._redis = aioredis.Redis.from_url(
                self.redis_url, encoding="utf-8", decode_responses=True
            )

            await self._redis.ping()
            logger.info(f"[Storage] Redis่ฟžๆŽฅๆˆๅŠŸ")

            await self._file.init_db()
            await self._sync_data()

        except ImportError:
            raise Exception("redisๆœชๅฎ‰่ฃ…")
        except Exception as e:
            logger.error(f"[Storage] Redisๅˆๅง‹ๅŒ–ๅคฑ่ดฅ: {e}")
            raise

    async def _sync_data(self) -> None:
        """Reconcile Redis and local files.

        Redis keys win when present (Redis -> file); otherwise non-empty file
        data seeds Redis. Failures are logged but non-fatal.
        """
        try:
            for key, file_func, key_name in [
                ("grok:tokens", self._file.load_tokens, "sso"),
                ("grok:settings", self._file.load_config, "global")
            ]:
                data = await self._redis.get(key)
                if data:
                    parsed = orjson.loads(data)
                    if key == "grok:tokens":
                        await self._file.save_tokens(parsed)
                    else:
                        await self._file.save_config(parsed)
                    logger.info(f"[Storage] {key.split(':')[1]}ๆ•ฐๆฎๅทฒไปŽRedisๅŒๆญฅ")
                else:
                    file_data = await file_func()
                    # Seed Redis only when the file actually holds data.
                    if file_data.get(key_name) or (key == "grok:tokens" and file_data.get("ssoSuper")):
                        await self._redis.set(key, orjson.dumps(file_data).decode())
                        logger.info(f"[Storage] {key.split(':')[1]}ๆ•ฐๆฎๅทฒๅˆๅง‹ๅŒ–ๅˆฐRedis")
        except Exception as e:
            logger.warning(f"[Storage] ๅŒๆญฅๅคฑ่ดฅ: {e}")

    async def _save_redis(self, key: str, data: Dict) -> None:
        """Serialize `data` as JSON into the Redis key; re-raise on failure."""
        try:
            await self._redis.set(key, orjson.dumps(data).decode())
        except Exception as e:
            logger.error(f"[Storage] ไฟๅญ˜Redisๅคฑ่ดฅ: {e}")
            raise

    async def load_tokens(self) -> Dict[str, Any]:
        """Load token data from the local file mirror."""
        return await self._file.load_tokens()

    async def save_tokens(self, data: Dict[str, Any]) -> None:
        """Persist token data to the file mirror, then to Redis."""
        await self._file.save_tokens(data)
        await self._save_redis("grok:tokens", data)

    async def load_config(self) -> Dict[str, Any]:
        """Load configuration from the local file mirror."""
        return await self._file.load_config()

    async def save_config(self, data: Dict[str, Any]) -> None:
        """Persist configuration to the file mirror, then to Redis."""
        await self._file.save_config(data)
        await self._save_redis("grok:settings", data)

    async def close(self) -> None:
        """Close the Redis connection."""
        if self._redis:
            await self._redis.close()
            logger.info("[Storage] Redisๅทฒๅ…ณ้—ญ")
+ logger.info("[Storage] Redisๅทฒๅ…ณ้—ญ")
397
+
398
+
399
class StorageManager:
    """Singleton that selects, owns, and tears down the storage backend.

    The backend is chosen from the STORAGE_MODE env var ("file", "mysql",
    "redis"); database modes additionally require DATABASE_URL.
    """

    _instance: Optional['StorageManager'] = None  # singleton instance
    _storage: Optional[BaseStorage] = None        # active backend
    _initialized: bool = False                    # True after successful init()

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    async def init(self) -> None:
        """Initialize the storage backend from environment variables (idempotent).

        Raises:
            ValueError: when mysql/redis mode is requested without DATABASE_URL.
        """
        if self._initialized:
            return

        mode = os.getenv("STORAGE_MODE", "file").lower()
        url = os.getenv("DATABASE_URL", "")
        data_dir = Path(__file__).parents[2] / "data"

        classes = {"mysql": MysqlStorage, "redis": RedisStorage, "file": FileStorage}

        if mode in ("mysql", "redis") and not url:
            raise ValueError(f"{mode.upper()}ๆจกๅผ้œ€่ฆDATABASE_URL")

        # BUGFIX: an unknown STORAGE_MODE used to fall back to FileStorage via
        # classes.get, but was still constructed with (url, data_dir) because
        # the branch tested `mode != "file"`, raising TypeError. Normalize the
        # mode first so constructor arguments always match the chosen class.
        if mode not in classes:
            logger.warning(f"[Storage] ๆœช็ŸฅSTORAGE_MODE '{mode}'๏ผŒๅ›ž้€€ๅˆฐfileๆจกๅผ")
            mode = "file"

        storage_class = classes[mode]
        self._storage = storage_class(data_dir) if mode == "file" else storage_class(url, data_dir)

        await self._storage.init_db()
        self._initialized = True
        logger.info(f"[Storage] ไฝฟ็”จ{mode}ๆจกๅผ")

    def get_storage(self) -> BaseStorage:
        """Return the active storage backend.

        Raises:
            RuntimeError: when called before init() has completed.
        """
        if not self._initialized or not self._storage:
            raise RuntimeError("StorageManagerๆœชๅˆๅง‹ๅŒ–")
        return self._storage

    async def close(self) -> None:
        """Close the backend if it exposes a close() coroutine (DB modes do)."""
        if self._storage and hasattr(self._storage, 'close'):
            await self._storage.close()
+ await self._storage.close()
442
+
443
+
444
+ # ๅ…จๅฑ€ๅฎžไพ‹
445
+ storage_manager = StorageManager()
app/models/grok_models.py ADDED
@@ -0,0 +1,163 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Grok ๆจกๅž‹้…็ฝฎๅ’Œๆžšไธพๅฎšไน‰"""
2
+
3
+ from enum import Enum
4
+ from typing import Dict, Any, Tuple
5
+
6
+
7
+ # ๆจกๅž‹้…็ฝฎ
8
+ _MODEL_CONFIG: Dict[str, Dict[str, Any]] = {
9
+ "grok-3-fast": {
10
+ "grok_model": ("grok-3", "MODEL_MODE_FAST"),
11
+ "rate_limit_model": "grok-3",
12
+ "cost": {"type": "low_cost", "multiplier": 1, "description": "่ฎก1ๆฌก่ฐƒ็”จ"},
13
+ "requires_super": False,
14
+ "display_name": "Grok 3 Fast",
15
+ "description": "Fast and efficient Grok 3 model",
16
+ "raw_model_path": "xai/grok-3",
17
+ "default_temperature": 1.0,
18
+ "default_max_output_tokens": 8192,
19
+ "supported_max_output_tokens": 131072,
20
+ "default_top_p": 0.95
21
+ },
22
+ "grok-4-fast": {
23
+ "grok_model": ("grok-4-mini-thinking-tahoe", "MODEL_MODE_GROK_4_MINI_THINKING"),
24
+ "rate_limit_model": "grok-4-mini-thinking-tahoe",
25
+ "cost": {"type": "low_cost", "multiplier": 1, "description": "่ฎก1ๆฌก่ฐƒ็”จ"},
26
+ "requires_super": False,
27
+ "display_name": "Grok 4 Fast",
28
+ "description": "Fast version of Grok 4 with mini thinking capabilities",
29
+ "raw_model_path": "xai/grok-4-mini-thinking-tahoe",
30
+ "default_temperature": 1.0,
31
+ "default_max_output_tokens": 8192,
32
+ "supported_max_output_tokens": 131072,
33
+ "default_top_p": 0.95
34
+ },
35
+ "grok-4-fast-expert": {
36
+ "grok_model": ("grok-4-mini-thinking-tahoe", "MODEL_MODE_EXPERT"),
37
+ "rate_limit_model": "grok-4-mini-thinking-tahoe",
38
+ "cost": {"type": "high_cost", "multiplier": 4, "description": "่ฎก4ๆฌก่ฐƒ็”จ"},
39
+ "requires_super": False,
40
+ "display_name": "Grok 4 Fast Expert",
41
+ "description": "Expert mode of Grok 4 Fast with enhanced reasoning",
42
+ "raw_model_path": "xai/grok-4-mini-thinking-tahoe",
43
+ "default_temperature": 1.0,
44
+ "default_max_output_tokens": 32768,
45
+ "supported_max_output_tokens": 131072,
46
+ "default_top_p": 0.95
47
+ },
48
+ "grok-4-expert": {
49
+ "grok_model": ("grok-4", "MODEL_MODE_EXPERT"),
50
+ "rate_limit_model": "grok-4",
51
+ "cost": {"type": "high_cost", "multiplier": 4, "description": "่ฎก4ๆฌก่ฐƒ็”จ"},
52
+ "requires_super": False,
53
+ "display_name": "Grok 4 Expert",
54
+ "description": "Full Grok 4 model with expert mode capabilities",
55
+ "raw_model_path": "xai/grok-4",
56
+ "default_temperature": 1.0,
57
+ "default_max_output_tokens": 32768,
58
+ "supported_max_output_tokens": 131072,
59
+ "default_top_p": 0.95
60
+ },
61
+ "grok-4-heavy": {
62
+ "grok_model": ("grok-4-heavy", "MODEL_MODE_HEAVY"),
63
+ "rate_limit_model": "grok-4-heavy",
64
+ "cost": {"type": "independent", "multiplier": 1, "description": "็‹ฌ็ซ‹่ฎก่ดน๏ผŒๅชๆœ‰Super็”จๆˆทๅฏ็”จ"},
65
+ "requires_super": True,
66
+ "display_name": "Grok 4 Heavy",
67
+ "description": "Most powerful Grok 4 model with heavy computational capabilities. Requires Super Token for access.",
68
+ "raw_model_path": "xai/grok-4-heavy",
69
+ "default_temperature": 1.0,
70
+ "default_max_output_tokens": 65536,
71
+ "supported_max_output_tokens": 131072,
72
+ "default_top_p": 0.95
73
+ },
74
+ "grok-4.1": {
75
+ "grok_model": ("grok-4-1-non-thinking-w-tool", "MODEL_MODE_GROK_4_1"),
76
+ "rate_limit_model": "grok-4-1-non-thinking-w-tool",
77
+ "cost": {"type": "low_cost", "multiplier": 1, "description": "่ฎก1ๆฌก่ฐƒ็”จ"},
78
+ "requires_super": False,
79
+ "display_name": "Grok 4.1",
80
+ "description": "Latest Grok 4.1 model with tool capabilities",
81
+ "raw_model_path": "xai/grok-4-1-non-thinking-w-tool",
82
+ "default_temperature": 1.0,
83
+ "default_max_output_tokens": 8192,
84
+ "supported_max_output_tokens": 131072,
85
+ "default_top_p": 0.95
86
+ },
87
+ "grok-4.1-thinking": {
88
+ "grok_model": ("grok-4-1-thinking-1108b", "MODEL_MODE_AUTO"),
89
+ "rate_limit_model": "grok-4-1-thinking-1108b",
90
+ "cost": {"type": "high_cost", "multiplier": 1, "description": "่ฎก1ๆฌก่ฐƒ็”จ"},
91
+ "requires_super": False,
92
+ "display_name": "Grok 4.1 Thinking",
93
+ "description": "Grok 4.1 model with advanced thinking and tool capabilities",
94
+ "raw_model_path": "xai/grok-4-1-thinking-1108b",
95
+ "default_temperature": 1.0,
96
+ "default_max_output_tokens": 32768,
97
+ "supported_max_output_tokens": 131072,
98
+ "default_top_p": 0.95
99
+ },
100
+ "grok-imagine-0.9": {
101
+ "grok_model": ("grok-3", "MODEL_MODE_FAST"),
102
+ "rate_limit_model": "grok-3",
103
+ "cost": {"type": "low_cost", "multiplier": 1, "description": "่ฎก1ๆฌก่ฐƒ็”จ"},
104
+ "requires_super": False,
105
+ "display_name": "Grok Imagine 0.9",
106
+ "description": "Image and video generation model. Supports text-to-image and image-to-video generation.",
107
+ "raw_model_path": "xai/grok-imagine-0.9",
108
+ "default_temperature": 1.0,
109
+ "default_max_output_tokens": 8192,
110
+ "supported_max_output_tokens": 131072,
111
+ "default_top_p": 0.95,
112
+ "is_video_model": True
113
+ }
114
+ }
115
+
116
+
117
class TokenType(Enum):
    """Kind of SSO token used to authenticate against Grok."""

    NORMAL = "ssoNormal"  # standard account token
    SUPER = "ssoSuper"    # Super-tier account token
121
+
122
+
123
class Models(Enum):
    """Enumeration of the externally supported model names.

    The classmethods delegate to the module-level _MODEL_CONFIG registry;
    member values are the public model identifiers.
    """

    GROK_3_FAST = "grok-3-fast"
    GROK_4_1 = "grok-4.1"
    GROK_4_1_THINKING = "grok-4.1-thinking"
    GROK_4_FAST = "grok-4-fast"
    GROK_4_FAST_EXPERT = "grok-4-fast-expert"
    GROK_4_EXPERT = "grok-4-expert"
    GROK_4_HEAVY = "grok-4-heavy"
    GROK_IMAGINE_0_9 = "grok-imagine-0.9"

    @classmethod
    def get_model_info(cls, model: str) -> Dict[str, Any]:
        """Return the registry entry for *model*, or an empty dict when unknown."""
        return _MODEL_CONFIG.get(model, {})

    @classmethod
    def is_valid_model(cls, model: str) -> bool:
        """Return True when *model* is a known registry key."""
        return model in _MODEL_CONFIG

    @classmethod
    def to_grok(cls, model: str) -> Tuple[str, str]:
        """Map a public model name to Grok's internal (model name, mode) pair.

        Unknown names pass through unchanged, paired with the FAST mode.
        """
        entry = _MODEL_CONFIG.get(model)
        if entry is None:
            return model, "MODEL_MODE_FAST"
        return entry["grok_model"]

    @classmethod
    def to_rate_limit(cls, model: str) -> str:
        """Map a public model name to the name used for rate limiting."""
        entry = _MODEL_CONFIG.get(model)
        if entry is None:
            return model
        return entry["rate_limit_model"]

    @classmethod
    def get_all_model_names(cls) -> list[str]:
        """Return every registered public model name."""
        return [*_MODEL_CONFIG]
app/models/openai_schema.py ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """OpenAI ่ฏทๆฑ‚-ๅ“ๅบ”ๆจกๅž‹ๅฎšไน‰"""
2
+
3
+ from fastapi import HTTPException
4
+ from typing import Optional, List, Union, Dict, Any
5
+ from pydantic import BaseModel, Field, field_validator
6
+
7
+ from app.models.grok_models import Models
8
+
9
+
10
class OpenAIChatRequest(BaseModel):
    """OpenAI-compatible chat completion request.

    Mirrors the OpenAI /v1/chat/completions request schema. Validation
    failures are raised as FastAPI HTTPException(400) so they surface
    directly as API error responses.
    """

    model: str = Field(..., description="ๆจกๅž‹ๅ็งฐ", min_length=1)
    messages: List[Dict[str, Any]] = Field(..., description="ๆถˆๆฏๅˆ—่กจ", min_length=1)
    stream: bool = Field(False, description="ๆตๅผๅ“ๅบ”")
    temperature: Optional[float] = Field(0.7, ge=0, le=2, description="้‡‡ๆ ทๆธฉๅบฆ")
    max_tokens: Optional[int] = Field(None, ge=1, le=100000, description="ๆœ€ๅคงTokenๆ•ฐ")
    top_p: Optional[float] = Field(1.0, ge=0, le=1, description="้‡‡ๆ ทๅ‚ๆ•ฐ")

    # BUG FIX: pydantic v2 requires @field_validator to be the OUTERMOST
    # decorator (written above @classmethod). The original order
    # (@classmethod above @field_validator) hides the validator proxy inside
    # a classmethod object, so these validators were never registered or run.
    @field_validator('messages')
    @classmethod
    def validate_messages(cls, v):
        """Validate message shape and roles; raises HTTPException(400) on bad input."""
        if not v:
            raise HTTPException(status_code=400, detail="ๆถˆๆฏๅˆ—่กจไธ่ƒฝไธบ็ฉบ")

        for msg in v:
            if not isinstance(msg, dict):
                raise HTTPException(status_code=400, detail="ๆฏไธชๆถˆๆฏๅฟ…้กปๆ˜ฏๅญ—ๅ…ธ")
            if 'role' not in msg:
                raise HTTPException(status_code=400, detail="ๆถˆๆฏ็ผบๅฐ‘ 'role' ๅญ—ๆฎต")
            if 'content' not in msg:
                raise HTTPException(status_code=400, detail="ๆถˆๆฏ็ผบๅฐ‘ 'content' ๅญ—ๆฎต")
            if msg['role'] not in ['system', 'user', 'assistant']:
                raise HTTPException(
                    status_code=400,
                    detail=f"ๆ— ๆ•ˆ่ง’่‰ฒ '{msg['role']}', ๅฟ…้กปๆ˜ฏ system/user/assistant"
                )

        return v

    @field_validator('model')
    @classmethod
    def validate_model(cls, v):
        """Reject model names not present in the model registry."""
        if not Models.is_valid_model(v):
            supported = Models.get_all_model_names()
            raise HTTPException(
                status_code=400,
                detail=f"ไธๆ”ฏๆŒ็š„ๆจกๅž‹ '{v}', ๆ”ฏๆŒ: {', '.join(supported)}"
            )
        return v
53
+
54
+
55
class OpenAIChatCompletionMessage(BaseModel):
    """A single message inside a non-streaming chat completion response."""
    role: str = Field(..., description="่ง’่‰ฒ")
    content: str = Field(..., description="ๅ†…ๅฎน")
    reference_id: Optional[str] = Field(default=None, description="ๅ‚่€ƒID")
    annotations: Optional[List[str]] = Field(default=None, description="ๆณจ้‡Š")
61
+
62
+
63
class OpenAIChatCompletionChoice(BaseModel):
    """One choice entry in a non-streaming chat completion response."""
    index: int = Field(..., description="็ดขๅผ•")
    message: OpenAIChatCompletionMessage = Field(..., description="ๆถˆๆฏ")
    logprobs: Optional[float] = Field(default=None, description="ๅฏนๆ•ฐๆฆ‚็އ")
    finish_reason: str = Field(default="stop", description="ๅฎŒๆˆๅŽŸๅ› ")
69
+
70
+
71
class OpenAIChatCompletionResponse(BaseModel):
    """Top-level non-streaming chat completion response (OpenAI-compatible)."""
    id: str = Field(..., description="ๅ“ๅบ”ID")
    object: str = Field("chat.completion", description="ๅฏน่ฑก็ฑปๅž‹")
    created: int = Field(..., description="ๅˆ›ๅปบๆ—ถ้—ดๆˆณ")
    model: str = Field(..., description="ๆจกๅž‹")
    choices: List[OpenAIChatCompletionChoice] = Field(..., description="้€‰้กน")
    usage: Optional[Dict[str, Any]] = Field(None, description="ไปค็‰Œไฝฟ็”จ")
79
+
80
+
81
class OpenAIChatCompletionChunkMessage(BaseModel):
    """Delta message fragment inside a streaming chunk."""
    role: str = Field(..., description="่ง’่‰ฒ")
    content: str = Field(..., description="ๅ†…ๅฎน")
85
+
86
+
87
class OpenAIChatCompletionChunkChoice(BaseModel):
    """One choice entry in a streaming chunk; `delta` carries the increment."""
    index: int = Field(..., description="็ดขๅผ•")
    delta: Optional[Union[Dict[str, Any], OpenAIChatCompletionChunkMessage]] = Field(
        None, description="Deltaๆ•ฐๆฎ"
    )
    finish_reason: Optional[str] = Field(None, description="ๅฎŒๆˆๅŽŸๅ› ")
94
+
95
+
96
class OpenAIChatCompletionChunkResponse(BaseModel):
    """Top-level streaming response chunk (object = chat.completion.chunk)."""
    id: str = Field(..., description="ๅ“ๅบ”ID")
    object: str = Field(default="chat.completion.chunk", description="ๅฏน่ฑก็ฑปๅž‹")
    created: int = Field(..., description="ๅˆ›ๅปบๆ—ถ้—ดๆˆณ")
    model: str = Field(..., description="ๆจกๅž‹")
    system_fingerprint: Optional[str] = Field(default=None, description="็ณป็ปŸๆŒ‡็บน")
    choices: List[OpenAIChatCompletionChunkChoice] = Field(..., description="้€‰้กน")
app/services/api_keys.py ADDED
@@ -0,0 +1,195 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """API Key ็ฎก็†ๅ™จ - ๅคš็”จๆˆทๅฏ†้’ฅ็ฎก็†"""
2
+
3
+ import orjson
4
+ import time
5
+ import secrets
6
+ import asyncio
7
+ from typing import List, Dict, Optional
8
+ from pathlib import Path
9
+
10
+ from app.core.logger import logger
11
+ from app.core.config import setting
12
+
13
+
14
class ApiKeyManager:
    """Singleton service that manages user API keys.

    Keys are held in memory as a list of dicts ({key, name, created_at,
    is_active}) and persisted as JSON to data/api_keys.json on every
    mutation. NOTE(review): list mutations happen outside self._lock (only
    file I/O is locked) — assumed safe under a single asyncio event loop;
    confirm if this is ever shared across threads.
    """

    _instance = None  # singleton instance cache

    def __new__(cls):
        # Classic singleton: every ApiKeyManager() returns the same object.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        # __init__ runs on every construction of the singleton; this guard
        # ensures state is only set up once.
        if hasattr(self, '_initialized'):
            return

        self.file_path = Path(__file__).parents[2] / "data" / "api_keys.json"
        self._keys: List[Dict] = []   # in-memory key records
        self._lock = asyncio.Lock()   # serializes file reads/writes
        self._loaded = False          # True once _load_data has been attempted

        self._initialized = True
        logger.debug(f"[ApiKey] ๅˆๅง‹ๅŒ–ๅฎŒๆˆ: {self.file_path}")

    async def init(self):
        """Load persisted keys on first use (idempotent)."""
        if not self._loaded:
            await self._load_data()

    async def _load_data(self):
        """Load API keys from disk into self._keys."""
        if self._loaded:
            return

        if not self.file_path.exists():
            self._keys = []
            self._loaded = True
            return

        try:
            async with self._lock:
                # Re-check inside the lock in case the file vanished meanwhile.
                if self.file_path.exists():
                    content = await asyncio.to_thread(self.file_path.read_bytes)
                    if content:
                        self._keys = orjson.loads(content)
            self._loaded = True
            logger.debug(f"[ApiKey] ๅŠ ่ฝฝไบ† {len(self._keys)} ไธช API Key")
        except Exception as e:
            logger.error(f"[ApiKey] ๅŠ ่ฝฝๅคฑ่ดฅ: {e}")
            self._keys = []
            # Even on failure, mark as loaded so later saves are not blocked
            # forever and cannot accidentally clobber data (original intent).
            self._loaded = True

    async def _save_data(self):
        """Persist self._keys to disk as indented JSON."""
        if not self._loaded:
            # Refuse to save before a load attempt — a save now would
            # overwrite the on-disk data with the empty in-memory list.
            logger.warning("[ApiKey] ๅฐ่ฏ•ๅœจๆ•ฐๆฎๆœชๅŠ ่ฝฝๆ—ถไฟๅญ˜๏ผŒๅทฒๅ–ๆถˆๆ“ไฝœไปฅ้˜ฒ่ฆ†็›–ๆ•ฐๆฎ")
            return

        try:
            # Ensure the parent directory exists.
            self.file_path.parent.mkdir(parents=True, exist_ok=True)

            async with self._lock:
                content = orjson.dumps(self._keys, option=orjson.OPT_INDENT_2)
                await asyncio.to_thread(self.file_path.write_bytes, content)
        except Exception as e:
            logger.error(f"[ApiKey] ไฟๅญ˜ๅคฑ่ดฅ: {e}")

    def generate_key(self) -> str:
        """Generate a new random key with the conventional "sk-" prefix."""
        return f"sk-{secrets.token_urlsafe(24)}"

    async def add_key(self, name: str) -> Dict:
        """Create, store and persist a single new API key; returns the record."""
        new_key = {
            "key": self.generate_key(),
            "name": name,
            "created_at": int(time.time()),
            "is_active": True
        }
        self._keys.append(new_key)
        await self._save_data()
        logger.info(f"[ApiKey] ๆทปๅŠ ๆ–ฐKey: {name}")
        return new_key

    async def batch_add_keys(self, name_prefix: str, count: int) -> List[Dict]:
        """Create *count* keys named "<prefix>-i" (bare prefix when count == 1)."""
        new_keys = []
        for i in range(1, count + 1):
            name = f"{name_prefix}-{i}" if count > 1 else name_prefix
            new_keys.append({
                "key": self.generate_key(),
                "name": name,
                "created_at": int(time.time()),
                "is_active": True
            })

        self._keys.extend(new_keys)
        await self._save_data()
        logger.info(f"[ApiKey] ๆ‰น้‡ๆทปๅŠ  {count} ไธช Key, ๅ‰็ผ€: {name_prefix}")
        return new_keys

    async def delete_key(self, key: str) -> bool:
        """Delete one key; returns True if something was removed."""
        initial_len = len(self._keys)
        self._keys = [k for k in self._keys if k["key"] != key]

        if len(self._keys) != initial_len:
            await self._save_data()
            logger.info(f"[ApiKey] ๅˆ ้™คKey: {key[:10]}...")
            return True
        return False

    async def batch_delete_keys(self, keys: List[str]) -> int:
        """Delete every key in *keys*; returns the number removed."""
        initial_len = len(self._keys)
        self._keys = [k for k in self._keys if k["key"] not in keys]

        deleted_count = initial_len - len(self._keys)
        if deleted_count > 0:
            await self._save_data()
            logger.info(f"[ApiKey] ๆ‰น้‡ๅˆ ้™ค {deleted_count} ไธช Key")
        return deleted_count

    async def update_key_status(self, key: str, is_active: bool) -> bool:
        """Enable/disable one key; returns True when the key exists."""
        for k in self._keys:
            if k["key"] == key:
                k["is_active"] = is_active
                await self._save_data()
                return True
        return False

    async def batch_update_keys_status(self, keys: List[str], is_active: bool) -> int:
        """Enable/disable several keys; returns how many actually changed."""
        updated_count = 0
        for k in self._keys:
            if k["key"] in keys:
                if k["is_active"] != is_active:
                    k["is_active"] = is_active
                    updated_count += 1

        if updated_count > 0:
            await self._save_data()
            logger.info(f"[ApiKey] ๆ‰น้‡ๆ›ดๆ–ฐ {updated_count} ไธช Key ็Šถๆ€ไธบ: {is_active}")
        return updated_count

    async def update_key_name(self, key: str, name: str) -> bool:
        """Rename one key; returns True when the key exists."""
        for k in self._keys:
            if k["key"] == key:
                k["name"] = name
                await self._save_data()
                return True
        return False

    def validate_key(self, key: str) -> Optional[Dict]:
        """Validate *key*; returns its record (plus is_admin flag) or None."""
        # 1. Check the globally configured key (acts as the default admin key).
        global_key = setting.grok_config.get("api_key")
        if global_key and key == global_key:
            return {
                "key": global_key,
                "name": "้ป˜่ฎค็ฎก็†ๅ‘˜",
                "is_active": True,
                "is_admin": True
            }

        # 2. Check the multi-key list; an inactive match yields None.
        for k in self._keys:
            if k["key"] == key:
                if k["is_active"]:
                    # Regular keys are non-admin; no finer permissions yet —
                    # this is identity only.
                    return {**k, "is_admin": False}
                return None

        return None

    def get_all_keys(self) -> List[Dict]:
        """Return the in-memory key list (not a copy)."""
        return self._keys


# Global singleton instance
api_key_manager = ApiKeyManager()
app/services/grok/cache.py ADDED
@@ -0,0 +1,243 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """็ผ“ๅญ˜ๆœๅŠกๆจกๅ— - ๆไพ›ๅ›พ็‰‡ๅ’Œ่ง†้ข‘็š„ไธ‹่ฝฝใ€็ผ“ๅญ˜ๅ’Œๆธ…็†ๅŠŸ่ƒฝ"""
2
+
3
+ import asyncio
4
+ import base64
5
+ from pathlib import Path
6
+ from typing import Optional, Tuple
7
+ from curl_cffi.requests import AsyncSession
8
+
9
+ from app.core.config import setting
10
+ from app.core.logger import logger
11
+ from app.services.grok.statsig import get_dynamic_headers
12
+
13
+
14
+ # ๅธธ้‡ๅฎšไน‰
15
+ MIME_TYPES = {
16
+ '.jpg': 'image/jpeg', '.jpeg': 'image/jpeg', '.png': 'image/png',
17
+ '.gif': 'image/gif', '.webp': 'image/webp', '.bmp': 'image/bmp',
18
+ }
19
+ DEFAULT_MIME = 'image/jpeg'
20
+ ASSETS_URL = "https://assets.grok.com"
21
+
22
+
23
class CacheService:
    """Base download-and-cache service for Grok asset files.

    Files are fetched from assets.grok.com with authenticated, browser-like
    headers and stored under data/temp/<cache_type>/ using a flattened
    filename (path separators replaced by '-').
    """

    def __init__(self, cache_type: str, timeout: float = 30.0):
        self.cache_type = cache_type                      # e.g. "image" / "video"
        self.cache_dir = Path(f"data/temp/{cache_type}")  # on-disk cache root
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.timeout = timeout                            # default per-request timeout (s)
        self._cleanup_lock = asyncio.Lock()               # prevents overlapping cleanups

    def _get_path(self, file_path: str) -> Path:
        """Map a remote file path to its local cache path (flattened)."""
        return self.cache_dir / file_path.lstrip('/').replace('/', '-')

    def _log(self, level: str, msg: str):
        """Log *msg* at *level* with a per-cache-type prefix."""
        getattr(logger, level)(f"[{self.cache_type.upper()}Cache] {msg}")

    def _build_headers(self, file_path: str, auth_token: str) -> dict:
        """Build browser-like headers, attaching auth + optional cf_clearance cookies."""
        cf = setting.grok_config.get("cf_clearance", "")
        return {
            **get_dynamic_headers(pathname=file_path),
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
            "Sec-Fetch-Dest": "document",
            "Sec-Fetch-Mode": "navigate",
            "Sec-Fetch-Site": "same-site",
            "Sec-Fetch-User": "?1",
            "Upgrade-Insecure-Requests": "1",
            "Referer": "https://grok.com/",
            "Cookie": f"{auth_token};{cf}" if cf else auth_token
        }

    async def download(self, file_path: str, auth_token: str, timeout: Optional[float] = None) -> Optional[Path]:
        """Download *file_path* into the cache; return the local path or None.

        Retry strategy:
        - inner loop: up to 5 retries on HTTP 403 (no proxy-pool rotation);
        - outer loop: retries on configurable status codes (default 401/429)
          with progressive delay, and on transport exceptions.
        """
        cache_path = self._get_path(file_path)
        if cache_path.exists():
            self._log("debug", "ๆ–‡ไปถๅทฒ็ผ“ๅญ˜")
            return cache_path

        # Outer retry: configurable status codes (401/429 etc.)
        retry_codes = setting.grok_config.get("retry_status_codes", [401, 429])
        MAX_OUTER_RETRY = 3

        for outer_retry in range(MAX_OUTER_RETRY + 1):  # +1 so we actually retry 3 times
            try:
                # Inner retry: 403s (this cache uses a fixed proxy, no pool,
                # so 403 is just retried against the same route)
                max_403_retries = 5
                retry_403_count = 0

                while retry_403_count <= max_403_retries:
                    proxy = await setting.get_proxy_async("cache")
                    proxies = {"http": proxy, "https": proxy} if proxy else {}

                    if proxy and outer_retry == 0 and retry_403_count == 0:
                        self._log("debug", f"ไฝฟ็”จไปฃ็†: {proxy.split('@')[-1] if '@' in proxy else proxy}")

                    async with AsyncSession() as session:
                        url = f"{ASSETS_URL}{file_path}"
                        if outer_retry == 0 and retry_403_count == 0:
                            self._log("debug", f"ไธ‹่ฝฝ: {url}")

                        response = await session.get(
                            url,
                            headers=self._build_headers(file_path, auth_token),
                            proxies=proxies,
                            timeout=timeout or self.timeout,
                            allow_redirects=True,
                            impersonate="chrome133a"
                        )

                        # 403: retry in the inner loop
                        if response.status_code == 403:
                            retry_403_count += 1

                            if retry_403_count <= max_403_retries:
                                self._log("warning", f"้‡ๅˆฐ403้”™่ฏฏ๏ผŒๆญฃๅœจ้‡่ฏ• ({retry_403_count}/{max_403_retries})...")
                                await asyncio.sleep(0.5)
                                continue

                            self._log("error", f"403้”™่ฏฏ๏ผŒๅทฒ้‡่ฏ•{retry_403_count-1}ๆฌก๏ผŒๆ”พๅผƒ")
                            return None

                        # Configurable status codes: retry in the outer loop
                        if response.status_code in retry_codes:
                            if outer_retry < MAX_OUTER_RETRY:
                                delay = (outer_retry + 1) * 0.1  # progressive delay: 0.1s, 0.2s, 0.3s
                                self._log("warning", f"้‡ๅˆฐ{response.status_code}้”™่ฏฏ๏ผŒๅค–ๅฑ‚้‡่ฏ• ({outer_retry+1}/{MAX_OUTER_RETRY})๏ผŒ็ญ‰ๅพ…{delay}s...")
                                await asyncio.sleep(delay)
                                break  # leave the inner loop, go to the next outer attempt
                            else:
                                self._log("error", f"{response.status_code}้”™่ฏฏ๏ผŒๅทฒ้‡่ฏ•{outer_retry}ๆฌก๏ผŒๆ”พๅผƒ")
                                return None

                        response.raise_for_status()
                        await asyncio.to_thread(cache_path.write_bytes, response.content)

                        if outer_retry > 0 or retry_403_count > 0:
                            self._log("info", f"้‡่ฏ•ๆˆๅŠŸ๏ผ")
                        else:
                            self._log("debug", "็ผ“ๅญ˜ๆˆๅŠŸ")

                        # Fire-and-forget cleanup (errors handled inside)
                        asyncio.create_task(self._safe_cleanup())
                        return cache_path

            except Exception as e:
                # NOTE(review): the bound here is MAX_OUTER_RETRY - 1 while the
                # status-code branch above uses MAX_OUTER_RETRY — the final
                # loop iteration never retries on exceptions; confirm intent.
                if outer_retry < MAX_OUTER_RETRY - 1:
                    self._log("warning", f"ไธ‹่ฝฝๅผ‚ๅธธ: {e}๏ผŒๅค–ๅฑ‚้‡่ฏ• ({outer_retry+1}/{MAX_OUTER_RETRY})...")
                    await asyncio.sleep(0.5)
                    continue

                self._log("error", f"ไธ‹่ฝฝๅคฑ่ดฅ: {e}๏ผˆๅทฒ้‡่ฏ•{outer_retry}ๆฌก๏ผ‰")
                return None

        return None

    def get_cached(self, file_path: str) -> Optional[Path]:
        """Return the cached local path for *file_path*, or None if absent."""
        path = self._get_path(file_path)
        return path if path.exists() else None

    async def _safe_cleanup(self):
        """Run cleanup(), swallowing and logging any exception (background task)."""
        try:
            await self.cleanup()
        except Exception as e:
            self._log("error", f"ๅŽๅฐๆธ…็†ๅคฑ่ดฅ: {e}")

    async def cleanup(self):
        """Evict oldest files until the cache is under its configured size limit."""
        if self._cleanup_lock.locked():
            return

        async with self._cleanup_lock:
            try:
                max_mb = setting.global_config.get(f"{self.cache_type}_cache_max_size_mb", 500)
                max_bytes = max_mb * 1024 * 1024

                # Collect file info as (path, size, mtime)
                files = [(f, (s := f.stat()).st_size, s.st_mtime)
                         for f in self.cache_dir.glob("*") if f.is_file()]
                total = sum(size for _, size, _ in files)

                if total <= max_bytes:
                    return

                self._log("info", f"ๆธ…็†็ผ“ๅญ˜ {total/1024/1024:.1f}MB -> {max_mb}MB")

                # Delete oldest files first (sorted by mtime)
                for path, size, _ in sorted(files, key=lambda x: x[2]):
                    if total <= max_bytes:
                        break
                    await asyncio.to_thread(path.unlink)
                    total -= size

                self._log("info", f"ๆธ…็†ๅฎŒๆˆ: {total/1024/1024:.1f}MB")
            except Exception as e:
                self._log("error", f"ๆธ…็†ๅคฑ่ดฅ: {e}")
182
+
183
+
184
class ImageCache(CacheService):
    """Image cache service (30s download timeout)."""

    def __init__(self):
        super().__init__("image", timeout=30.0)

    async def download_image(self, path: str, token: str) -> Optional[Path]:
        """Download an image asset into the cache; returns the local path or None."""
        return await self.download(path, token)

    @staticmethod
    def to_base64(image_path: Path) -> Optional[str]:
        """Encode a local image file as a data: URI; MIME guessed from the suffix."""
        try:
            if not image_path.exists():
                logger.error(f"[ImageCache] ๆ–‡ไปถไธๅญ˜ๅœจ: {image_path}")
                return None

            data = base64.b64encode(image_path.read_bytes()).decode()
            mime = MIME_TYPES.get(image_path.suffix.lower(), DEFAULT_MIME)
            return f"data:{mime};base64,{data}"
        except Exception as e:
            logger.error(f"[ImageCache] ่ฝฌๆขๅคฑ่ดฅ: {e}")
            return None

    async def download_base64(self, path: str, token: str) -> Optional[str]:
        """Download an image and return it as a data: URI, deleting the temp file."""
        try:
            cache_path = await self.download(path, token)
            if not cache_path:
                return None

            result = self.to_base64(cache_path)

            # Remove the temp file; base64 result already holds the bytes.
            try:
                cache_path.unlink()
            except Exception as e:
                logger.warning(f"[ImageCache] ๅˆ ้™คไธดๆ—ถๆ–‡ไปถๅคฑ่ดฅ: {e}")

            return result
        except Exception as e:
            logger.error(f"[ImageCache] ไธ‹่ฝฝbase64ๅคฑ่ดฅ: {e}")
            return None
228
+
229
+
230
class VideoCache(CacheService):
    """Video cache service (longer 60s download timeout for larger files)."""

    def __init__(self):
        super().__init__("video", timeout=60.0)

    async def download_video(self, path: str, token: str) -> Optional[Path]:
        """Download a video asset into the cache; returns the local path or None."""
        return await self.download(path, token)


# Global singleton instances
image_cache_service = ImageCache()
video_cache_service = VideoCache()
app/services/grok/client.py ADDED
@@ -0,0 +1,358 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Grok API ๅฎขๆˆท็ซฏ - ๅค„็†OpenAIๅˆฐGrok็š„่ฏทๆฑ‚่ฝฌๆขๅ’Œๅ“ๅบ”ๅค„็†"""
2
+
3
+ import asyncio
4
+ import orjson
5
+ from typing import Dict, List, Tuple, Any, Optional
6
+ from curl_cffi.requests import AsyncSession as curl_AsyncSession
7
+
8
+ from app.core.config import setting
9
+ from app.core.logger import logger
10
+ from app.models.grok_models import Models
11
+ from app.services.grok.processer import GrokResponseProcessor
12
+ from app.services.grok.statsig import get_dynamic_headers
13
+ from app.services.grok.token import token_manager
14
+ from app.services.grok.upload import ImageUploadManager
15
+ from app.services.grok.create import PostCreateManager
16
+ from app.core.exception import GrokApiException
17
+
18
+
19
+ # ๅธธ้‡
20
+ API_ENDPOINT = "https://grok.com/rest/app-chat/conversations/new"
21
+ TIMEOUT = 120
22
+ BROWSER = "chrome133a"
23
+ MAX_RETRY = 3
24
+ MAX_UPLOADS = 20 # ๆ้ซ˜ๅนถๅ‘ไธŠไผ ้™ๅˆถไปฅๆ”ฏๆŒๆ›ด้ซ˜ๅนถๅ‘
25
+
26
+
27
+ class GrokClient:
28
+ """Grok API ๅฎขๆˆท็ซฏ"""
29
+
30
+ _upload_sem = None # ๅปถ่ฟŸๅˆๅง‹ๅŒ–
31
+
32
+ @staticmethod
33
+ def _get_upload_semaphore():
34
+ """่Žทๅ–ไธŠไผ ไฟกๅท้‡๏ผˆๅŠจๆ€้…็ฝฎ๏ผ‰"""
35
+ if GrokClient._upload_sem is None:
36
+ # ไปŽ้…็ฝฎ่ฏปๅ–๏ผŒๅฆ‚ๆžœไธๅฏ็”จๅˆ™ไฝฟ็”จ้ป˜่ฎคๅ€ผ
37
+ max_concurrency = setting.global_config.get("max_upload_concurrency", MAX_UPLOADS)
38
+ GrokClient._upload_sem = asyncio.Semaphore(max_concurrency)
39
+ logger.debug(f"[Client] ๅˆๅง‹ๅŒ–ไธŠไผ ๅนถๅ‘้™ๅˆถ: {max_concurrency}")
40
+ return GrokClient._upload_sem
41
+
42
+ @staticmethod
43
+ async def openai_to_grok(request: dict):
44
+ """่ฝฌๆขOpenAI่ฏทๆฑ‚ไธบGrok่ฏทๆฑ‚"""
45
+ model = request["model"]
46
+ content, images = GrokClient._extract_content(request["messages"])
47
+ stream = request.get("stream", False)
48
+
49
+ # ่Žทๅ–ๆจกๅž‹ไฟกๆฏ
50
+ info = Models.get_model_info(model)
51
+ grok_model, mode = Models.to_grok(model)
52
+ is_video = info.get("is_video_model", False)
53
+
54
+ # ่ง†้ข‘ๆจกๅž‹้™ๅˆถ
55
+ if is_video and len(images) > 1:
56
+ logger.warning(f"[Client] ่ง†้ข‘ๆจกๅž‹ไป…ๆ”ฏๆŒ1ๅผ ๅ›พ็‰‡๏ผŒๅทฒๆˆชๅ–ๅ‰1ๅผ ")
57
+ images = images[:1]
58
+
59
+ return await GrokClient._retry(model, content, images, grok_model, mode, is_video, stream)
60
+
61
+ @staticmethod
62
+ async def _retry(model: str, content: str, images: List[str], grok_model: str, mode: str, is_video: bool, stream: bool):
63
+ """้‡่ฏ•่ฏทๆฑ‚"""
64
+ last_err = None
65
+
66
+ for i in range(MAX_RETRY):
67
+ try:
68
+ token = await token_manager.get_token(model)
69
+ img_ids, img_uris = await GrokClient._upload(images, token)
70
+
71
+ # ่ง†้ข‘ๆจกๅž‹ๅˆ›ๅปบไผš่ฏ
72
+ post_id = None
73
+ if is_video and img_ids and img_uris:
74
+ post_id = await GrokClient._create_post(img_ids[0], img_uris[0], token)
75
+
76
+ payload = GrokClient._build_payload(content, grok_model, mode, img_ids, img_uris, is_video, post_id)
77
+ return await GrokClient._request(payload, token, model, stream, post_id)
78
+
79
+ except GrokApiException as e:
80
+ last_err = e
81
+ # ๆฃ€ๆŸฅๆ˜ฏๅฆๅฏ้‡่ฏ•
82
+ if e.error_code not in ["HTTP_ERROR", "NO_AVAILABLE_TOKEN"]:
83
+ raise
84
+
85
+ status = e.context.get("status") if e.context else None
86
+ retry_codes = setting.grok_config.get("retry_status_codes", [401, 429])
87
+
88
+ if status not in retry_codes:
89
+ raise
90
+
91
+ if i < MAX_RETRY - 1:
92
+ logger.warning(f"[Client] ๅคฑ่ดฅ(็Šถๆ€:{status}), ้‡่ฏ• {i+1}/{MAX_RETRY}")
93
+ await asyncio.sleep(0.5)
94
+
95
+ raise last_err or GrokApiException("่ฏทๆฑ‚ๅคฑ่ดฅ", "REQUEST_ERROR")
96
+
97
+ @staticmethod
98
+ def _extract_content(messages: List[Dict]) -> Tuple[str, List[str]]:
99
+ """ๆๅ–ๆ–‡ๆœฌๅ’Œๅ›พ็‰‡๏ผŒไฟ็•™่ง’่‰ฒ็ป“ๆž„"""
100
+ formatted_messages = []
101
+ images = []
102
+
103
+ # ่ง’่‰ฒๆ˜ ๅฐ„
104
+ role_map = {
105
+ "system": "็ณป็ปŸ",
106
+ "user": "็”จๆˆท",
107
+ "assistant": "grok"
108
+ }
109
+
110
+ for msg in messages:
111
+ role = msg.get("role", "user")
112
+ content = msg.get("content", "")
113
+ role_prefix = role_map.get(role, role)
114
+
115
+ # ๆๅ–ๆ–‡ๆœฌๅ†…ๅฎน
116
+ text_parts = []
117
+ if isinstance(content, list):
118
+ for item in content:
119
+ if item.get("type") == "text":
120
+ text_parts.append(item.get("text", ""))
121
+ elif item.get("type") == "image_url":
122
+ if url := item.get("image_url", {}).get("url"):
123
+ images.append(url)
124
+ else:
125
+ text_parts.append(content)
126
+
127
+ # ๅˆๅนถ่ฏฅๆถˆๆฏ็š„ๆ–‡ๆœฌๅนถๆทปๅŠ ่ง’่‰ฒๅ‰็ผ€
128
+ msg_text = "".join(text_parts).strip()
129
+ if msg_text:
130
+ formatted_messages.append(f"{role_prefix}๏ผš{msg_text}")
131
+
132
+ # ็”จๆข่กŒ็ฌฆ่ฟžๆŽฅๆ‰€ๆœ‰ๆถˆๆฏ
133
+ return "\n".join(formatted_messages), images
134
+
135
+ @staticmethod
136
+ async def _upload(urls: List[str], token: str) -> Tuple[List[str], List[str]]:
137
+ """ๅนถๅ‘ไธŠไผ ๅ›พ็‰‡"""
138
+ if not urls:
139
+ return [], []
140
+
141
+ async def upload_limited(url):
142
+ async with GrokClient._get_upload_semaphore():
143
+ return await ImageUploadManager.upload(url, token)
144
+
145
+ results = await asyncio.gather(*[upload_limited(u) for u in urls], return_exceptions=True)
146
+
147
+ ids, uris = [], []
148
+ for url, result in zip(urls, results):
149
+ if isinstance(result, Exception):
150
+ logger.warning(f"[Client] ไธŠไผ ๅคฑ่ดฅ: {url} - {result}")
151
+ elif isinstance(result, tuple) and len(result) == 2:
152
+ fid, furi = result
153
+ if fid:
154
+ ids.append(fid)
155
+ uris.append(furi)
156
+
157
+ return ids, uris
158
+
159
+ @staticmethod
160
+ async def _create_post(file_id: str, file_uri: str, token: str) -> Optional[str]:
161
+ """ๅˆ›ๅปบ่ง†้ข‘ไผš่ฏ"""
162
+ try:
163
+ result = await PostCreateManager.create(file_id, file_uri, token)
164
+ if result and result.get("success"):
165
+ return result.get("post_id")
166
+ except Exception as e:
167
+ logger.warning(f"[Client] ๅˆ›ๅปบไผš่ฏๅคฑ่ดฅ: {e}")
168
+ return None
169
+
170
    @staticmethod
    def _build_payload(content: str, model: str, mode: str, img_ids: List[str], img_uris: List[str], is_video: bool = False, post_id: Optional[str] = None) -> Dict:
        """Build the JSON payload for a Grok conversation request.

        Args:
            content: Flattened, role-prefixed prompt text.
            model: Target Grok model name.
            mode: Value forwarded as ``modelMode`` in the standard payload.
            img_ids: Uploaded file attachment ids.
            img_uris: Uploaded asset URIs (the first one is used as the video source).
            is_video: Build the video-generation payload when True and an
                image URI is available.
            post_id: Optional media-post id; when present it is preferred over
                the raw asset URI for the video reference link.

        Returns:
            A dict ready to be serialized as the request body.
        """
        # Video generation is special-cased: the message embeds a reference URL
        # (imagine post when available, otherwise the raw asset), pins the model
        # to "grok-3" and enables the videoGen tool override.
        if is_video and img_uris:
            img_msg = f"https://grok.com/imagine/{post_id}" if post_id else f"https://assets.grok.com/post/{img_uris[0]}"
            return {
                "temporary": True,
                "modelName": "grok-3",
                "message": f"{img_msg} {content} --mode=custom",
                "fileAttachments": img_ids,
                "toolOverrides": {"videoGen": True}
            }

        # Standard chat payload; flags appear to mirror the grok.com web client
        # defaults — NOTE(review): confirm against current client behavior.
        return {
            "temporary": setting.grok_config.get("temporary", True),
            "modelName": model,
            "message": content,
            "fileAttachments": img_ids,
            "imageAttachments": [],
            "disableSearch": False,
            "enableImageGeneration": True,
            "returnImageBytes": False,
            "returnRawGrokInXaiRequest": False,
            "enableImageStreaming": True,
            "imageGenerationCount": 2,
            "forceConcise": False,
            "toolOverrides": {},
            "enableSideBySide": True,
            "sendFinalMetadata": True,
            "isReasoning": False,
            "webpageUrls": [],
            "disableTextFollowUps": True,
            "responseMetadata": {"requestModelDetails": {"modelId": model}},
            "disableMemory": False,
            "forceSideBySide": False,
            "modelMode": mode,
            "isAsyncChat": False
        }
210
+
211
    @staticmethod
    async def _request(payload: dict, token: str, model: str, stream: bool, post_id: Optional[str] = None):
        """Send a conversation request with two nested retry loops.

        Outer loop: retries configurable status codes (default 401/429) up to
        MAX_OUTER_RETRY times with a small progressive delay.
        Inner loop: retries 403 responses (proxy-pool mode only) up to 5
        times, forcing a proxy refresh on each attempt.

        Args:
            payload: Request body built by ``_build_payload``.
            token: Cookie-style auth token; required.
            model: Model name (drives the imagine Referer header).
            stream: When True, return the streaming generator (which owns the
                session); otherwise a fully processed response object.
            post_id: Optional imagine post id used for the Referer header.

        Raises:
            GrokApiException: On missing token, HTTP errors, network errors,
                or when every retry attempt is exhausted.
        """
        if not token:
            raise GrokApiException("认证令牌缺失", "NO_AUTH_TOKEN")

        # Outer retry: configurable status codes (401/429 etc.)
        retry_codes = setting.grok_config.get("retry_status_codes", [401, 429])
        MAX_OUTER_RETRY = 3

        for outer_retry in range(MAX_OUTER_RETRY + 1):  # +1 so there are 3 real retries
            # Inner retry: 403 via the proxy pool
            max_403_retries = 5
            retry_403_count = 0

            while retry_403_count <= max_403_retries:
                # Imported lazily — presumably to avoid a circular import at
                # module load time; confirm before hoisting to the top.
                from app.core.proxy_pool import proxy_pool

                # On a 403 retry with the proxy pool enabled, force-refresh the proxy.
                if retry_403_count > 0 and proxy_pool._enabled:
                    logger.info(f"[Client] 403重试 {retry_403_count}/{max_403_retries}，刷新代理...")
                    proxy = await proxy_pool.force_refresh()
                else:
                    proxy = await setting.get_proxy_async("service")

                proxies = {"http": proxy, "https": proxy} if proxy else None

                # Headers are rebuilt inside the loop so each retry picks up fresh state.
                headers = GrokClient._build_headers(token)
                if model == "grok-imagine-0.9":
                    file_attachments = payload.get("fileAttachments", [])
                    ref_id = post_id or (file_attachments[0] if file_attachments else "")
                    if ref_id:
                        headers["Referer"] = f"https://grok.com/imagine/{ref_id}"

                # One fresh impersonated session per attempt.
                session = curl_AsyncSession(impersonate=BROWSER)
                try:
                    response = await session.post(
                        API_ENDPOINT,
                        headers=headers,
                        data=orjson.dumps(payload),
                        timeout=TIMEOUT,
                        stream=True,
                        proxies=proxies
                    )

                    # Inner 403 retry: only triggered when the proxy pool is enabled.
                    if response.status_code == 403 and proxy_pool._enabled:
                        retry_403_count += 1
                        if retry_403_count <= max_403_retries:
                            logger.warning(f"[Client] 遇到403错误，正在重试 ({retry_403_count}/{max_403_retries})...")
                            await session.close()
                            await asyncio.sleep(0.5)
                            continue
                        # Inner retries exhausted: fall through to error handling below.
                        logger.error(f"[Client] 403错误，已重试{retry_403_count-1}次，放弃")

                    # Configurable status-code errors -> outer retry.
                    if response.status_code in retry_codes:
                        if outer_retry < MAX_OUTER_RETRY:
                            delay = (outer_retry + 1) * 0.1
                            logger.warning(f"[Client] 遇到{response.status_code}错误，外层重试 ({outer_retry+1}/{MAX_OUTER_RETRY})，等待{delay}s...")
                            await session.close()
                            await asyncio.sleep(delay)
                            break  # leave the inner loop; next outer attempt
                        else:
                            logger.error(f"[Client] {response.status_code}错误，已重试{outer_retry}次，放弃")
                            try:
                                GrokClient._handle_error(response, token)  # always raises
                            finally:
                                await session.close()

                    # Any other non-200 status: raise via the shared handler.
                    if response.status_code != 200:
                        try:
                            GrokClient._handle_error(response, token)  # always raises
                        finally:
                            await session.close()

                    # Success — reset the token failure counter in the background.
                    asyncio.create_task(token_manager.reset_failure(token))

                    if outer_retry > 0 or retry_403_count > 0:
                        logger.info(f"[Client] 重试成功！")

                    # Hand off the response.
                    if stream:
                        # The streaming iterator owns (and closes) the session.
                        result = GrokResponseProcessor.process_stream(response, token, session)
                    else:
                        # Normal responses: close the session as soon as parsing is done.
                        try:
                            result = await GrokResponseProcessor.process_normal(response, token, model)
                        finally:
                            await session.close()

                    asyncio.create_task(GrokClient._update_limits(token, model))
                    return result

                except Exception as e:
                    await session.close()
                    if "RequestsError" in str(type(e)):
                        logger.error(f"[Client] 网络错误: {e}")
                        raise GrokApiException(f"网络错误: {e}", "NETWORK_ERROR") from e
                    raise

        raise GrokApiException("请求失败：已达到最大重试次数", "MAX_RETRIES_EXCEEDED")
319
+
320
+
321
+ @staticmethod
322
+ def _build_headers(token: str) -> Dict[str, str]:
323
+ """ๆž„ๅปบ่ฏทๆฑ‚ๅคด"""
324
+ headers = get_dynamic_headers("/rest/app-chat/conversations/new")
325
+ cf = setting.grok_config.get("cf_clearance", "")
326
+ headers["Cookie"] = f"{token};{cf}" if cf else token
327
+ return headers
328
+
329
    @staticmethod
    def _handle_error(response, token: str):
        """Record a failed request against the token and raise HTTP_ERROR.

        Always raises GrokApiException; 403 is treated as a Cloudflare block
        with a canned message, other statuses surface the response body.
        NOTE(review): asyncio.create_task requires a running event loop — this
        helper is presumably only called from async request paths; confirm.
        """
        if response.status_code == 403:
            msg = "您的IP被拦截，请尝试以下方法之一: 1.更换IP 2.使用代理 3.配置CF值"
            data = {"cf_blocked": True, "status": 403}
            logger.warning(f"[Client] {msg}")
        else:
            # Best-effort extraction of the error body (JSON preferred, text fallback).
            try:
                data = response.json()
                msg = str(data)
            except:
                data = response.text
                msg = data[:200] if data else "未知错误"

        # Fire-and-forget bookkeeping: failure count and cooldown for this token.
        asyncio.create_task(token_manager.record_failure(token, response.status_code, msg))
        asyncio.create_task(token_manager.apply_cooldown(token, response.status_code))
        raise GrokApiException(
            f"请求失败: {response.status_code} - {msg}",
            "HTTP_ERROR",
            {"status": response.status_code, "data": data}
        )
351
+
352
+ @staticmethod
353
+ async def _update_limits(token: str, model: str):
354
+ """ๆ›ดๆ–ฐ้€Ÿ็އ้™ๅˆถ"""
355
+ try:
356
+ await token_manager.check_limits(token, model)
357
+ except Exception as e:
358
+ logger.error(f"[Client] ๆ›ดๆ–ฐ้™ๅˆถๅคฑ่ดฅ: {e}")
app/services/grok/create.py ADDED
@@ -0,0 +1,140 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Postๅˆ›ๅปบ็ฎก็†ๅ™จ - ็”จไบŽ่ง†้ข‘็”Ÿๆˆๅ‰็š„ไผš่ฏๅˆ›ๅปบ"""
2
+
3
+ import asyncio
4
+ import orjson
5
+ from typing import Dict, Any, Optional
6
+ from curl_cffi.requests import AsyncSession
7
+
8
+ from app.services.grok.statsig import get_dynamic_headers
9
+ from app.core.exception import GrokApiException
10
+ from app.core.config import setting
11
+ from app.core.logger import logger
12
+
13
+
14
+ # ๅธธ้‡
15
+ ENDPOINT = "https://grok.com/rest/media/post/create"
16
+ TIMEOUT = 30
17
+ BROWSER = "chrome133a"
18
+
19
+
20
class PostCreateManager:
    """Creates a grok.com media post (session) ahead of video generation."""

    @staticmethod
    async def create(file_id: str, file_uri: str, auth_token: str) -> Optional[Dict[str, Any]]:
        """Create a media-post record for an uploaded asset.

        Retries in two nested loops: the inner loop handles 403s via the
        proxy pool (up to 5 proxy refreshes), the outer loop handles
        configurable status codes (default 401/429, up to 3 attempts).

        Args:
            file_id: Uploaded file id.
            file_uri: Uploaded file URI (relative to assets.grok.com).
            auth_token: Cookie-style auth token.

        Returns:
            Dict with ``post_id``, the original file info, a ``success`` flag
            and the raw response ``data``.

        Raises:
            GrokApiException: On invalid arguments, HTTP failures, or when
                all retries are exhausted.
        """
        # Argument validation.
        if not file_id or not file_uri:
            raise GrokApiException("文件ID或URI缺失", "INVALID_PARAMS")
        if not auth_token:
            raise GrokApiException("认证令牌缺失", "NO_AUTH_TOKEN")

        try:
            # Request body: point the post at the uploaded asset.
            data = {
                "media_url": f"https://assets.grok.com/{file_uri}",
                "media_type": "MEDIA_POST_TYPE_IMAGE"
            }

            cf = setting.grok_config.get("cf_clearance", "")
            headers = {
                **get_dynamic_headers("/rest/media/post/create"),
                "Cookie": f"{auth_token};{cf}" if cf else auth_token
            }

            # Outer retry: configurable status codes (401/429 etc.)
            retry_codes = setting.grok_config.get("retry_status_codes", [401, 429])
            MAX_OUTER_RETRY = 3

            for outer_retry in range(MAX_OUTER_RETRY + 1):  # +1 so there are 3 real retries
                # Inner retry: 403 via the proxy pool
                max_403_retries = 5
                retry_403_count = 0

                while retry_403_count <= max_403_retries:
                    # Imported lazily — presumably to avoid a circular import; confirm.
                    from app.core.proxy_pool import proxy_pool

                    # On a 403 retry with the proxy pool enabled, force-refresh the proxy.
                    if retry_403_count > 0 and proxy_pool._enabled:
                        logger.info(f"[PostCreate] 403重试 {retry_403_count}/{max_403_retries}，刷新代理...")
                        proxy = await proxy_pool.force_refresh()
                    else:
                        proxy = await setting.get_proxy_async("service")

                    proxies = {"http": proxy, "https": proxy} if proxy else None

                    # One fresh session per attempt.
                    async with AsyncSession() as session:
                        response = await session.post(
                            ENDPOINT,
                            headers=headers,
                            json=data,
                            impersonate=BROWSER,
                            timeout=TIMEOUT,
                            proxies=proxies
                        )

                        # Inner 403 retry: only triggered when the proxy pool is enabled.
                        if response.status_code == 403 and proxy_pool._enabled:
                            retry_403_count += 1

                            if retry_403_count <= max_403_retries:
                                logger.warning(f"[PostCreate] 遇到403错误，正在重试 ({retry_403_count}/{max_403_retries})...")
                                await asyncio.sleep(0.5)
                                continue

                            # Inner retries exhausted; fall through to error handling.
                            logger.error(f"[PostCreate] 403错误，已重试{retry_403_count-1}次，放弃")

                        # Configurable status-code errors -> outer retry.
                        if response.status_code in retry_codes:
                            if outer_retry < MAX_OUTER_RETRY:
                                delay = (outer_retry + 1) * 0.1  # progressive delay: 0.1s, 0.2s, 0.3s
                                logger.warning(f"[PostCreate] 遇到{response.status_code}错误，外层重试 ({outer_retry+1}/{MAX_OUTER_RETRY})，等待{delay}s...")
                                await asyncio.sleep(delay)
                                break  # leave the inner loop; next outer attempt
                            else:
                                logger.error(f"[PostCreate] {response.status_code}错误，已重试{outer_retry}次，放弃")
                                raise GrokApiException(f"创建失败: {response.status_code}错误", "CREATE_ERROR")

                        if response.status_code == 200:
                            result = response.json()
                            post_id = result.get("post", {}).get("id", "")

                            if outer_retry > 0 or retry_403_count > 0:
                                logger.info(f"[PostCreate] 重试成功！")

                            logger.debug(f"[PostCreate] 成功，会话ID: {post_id}")
                            return {
                                "post_id": post_id,
                                "file_id": file_id,
                                "file_uri": file_uri,
                                "success": True,
                                "data": result
                            }

                        # Any other error: surface status + best-effort body details.
                        try:
                            error = response.json()
                            msg = f"状态码: {response.status_code}, 详情: {error}"
                        except:
                            msg = f"状态码: {response.status_code}, 详情: {response.text[:200]}"

                        logger.error(f"[PostCreate] 失败: {msg}")
                        raise GrokApiException(f"创建失败: {msg}", "CREATE_ERROR")

        except GrokApiException:
            raise
        except Exception as e:
            logger.error(f"[PostCreate] 异常: {e}")
            raise GrokApiException(f"创建异常: {e}", "CREATE_ERROR") from e
app/services/grok/processer.py ADDED
@@ -0,0 +1,430 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Grok API ๅ“ๅบ”ๅค„็†ๅ™จ - ๅค„็†ๆตๅผๅ’Œ้žๆตๅผๅ“ๅบ”"""
2
+
3
+ import orjson
4
+ import uuid
5
+ import time
6
+ import asyncio
7
+ from typing import AsyncGenerator, Tuple, Any
8
+
9
+ from app.core.config import setting
10
+ from app.core.exception import GrokApiException
11
+ from app.core.logger import logger
12
+ from app.models.openai_schema import (
13
+ OpenAIChatCompletionResponse,
14
+ OpenAIChatCompletionChoice,
15
+ OpenAIChatCompletionMessage,
16
+ OpenAIChatCompletionChunkResponse,
17
+ OpenAIChatCompletionChunkChoice,
18
+ OpenAIChatCompletionChunkMessage
19
+ )
20
+ from app.services.grok.cache import image_cache_service, video_cache_service
21
+
22
+
23
class StreamTimeoutManager:
    """Tracks three timeout budgets for a streaming response.

    - ``first_timeout``: deadline for the first chunk to arrive
    - ``chunk_timeout``: maximum gap allowed between consecutive chunks
    - ``total_timeout``: overall wall-clock budget (values <= 0 disable it)
    """

    def __init__(self, chunk_timeout: int = 120, first_timeout: int = 30, total_timeout: int = 600):
        self.chunk_timeout = chunk_timeout
        self.first_timeout = first_timeout
        self.total_timeout = total_timeout
        # Event-loop clock; all checks compare against these marks.
        self.start_time = asyncio.get_event_loop().time()
        self.last_chunk_time = self.start_time
        self.first_received = False

    def check_timeout(self) -> Tuple[bool, str]:
        """Return (timed_out, reason); reason is "" while within budget."""
        now = asyncio.get_event_loop().time()
        elapsed = now - self.start_time

        if not self.first_received and elapsed > self.first_timeout:
            return True, f"首次响应超时({self.first_timeout}秒)"

        if self.total_timeout > 0 and elapsed > self.total_timeout:
            return True, f"总超时({self.total_timeout}秒)"

        if self.first_received and now - self.last_chunk_time > self.chunk_timeout:
            return True, f"数据块超时({self.chunk_timeout}秒)"

        return False, ""

    def mark_received(self):
        """Record that a chunk arrived: refresh the gap clock, flag first receipt."""
        self.last_chunk_time = asyncio.get_event_loop().time()
        self.first_received = True

    def duration(self) -> float:
        """Elapsed seconds since this manager was created."""
        return asyncio.get_event_loop().time() - self.start_time
57
+
58
+
59
+ class GrokResponseProcessor:
60
+ """Grokๅ“ๅบ”ๅค„็†ๅ™จ"""
61
+
62
+ @staticmethod
63
+ async def process_normal(response, auth_token: str, model: str = None) -> OpenAIChatCompletionResponse:
64
+ """ๅค„็†้žๆตๅผๅ“ๅบ”"""
65
+ response_closed = False
66
+ try:
67
+ async for chunk in response.aiter_lines():
68
+ if not chunk:
69
+ continue
70
+
71
+ data = orjson.loads(chunk)
72
+
73
+ # ้”™่ฏฏๆฃ€ๆŸฅ
74
+ if error := data.get("error"):
75
+ raise GrokApiException(
76
+ f"API้”™่ฏฏ: {error.get('message', 'ๆœช็Ÿฅ้”™่ฏฏ')}",
77
+ "API_ERROR",
78
+ {"code": error.get("code")}
79
+ )
80
+
81
+ grok_resp = data.get("result", {}).get("response", {})
82
+
83
+ # ่ง†้ข‘ๅ“ๅบ”
84
+ if video_resp := grok_resp.get("streamingVideoGenerationResponse"):
85
+ if video_url := video_resp.get("videoUrl"):
86
+ content = await GrokResponseProcessor._build_video_content(video_url, auth_token)
87
+ result = GrokResponseProcessor._build_response(content, model or "grok-imagine-0.9")
88
+ response_closed = True
89
+ response.close()
90
+ return result
91
+
92
+ # ๆจกๅž‹ๅ“ๅบ”
93
+ model_response = grok_resp.get("modelResponse")
94
+ if not model_response:
95
+ continue
96
+
97
+ if error_msg := model_response.get("error"):
98
+ raise GrokApiException(f"ๆจกๅž‹้”™่ฏฏ: {error_msg}", "MODEL_ERROR")
99
+
100
+ # ๆž„ๅปบๅ†…ๅฎน
101
+ content = model_response.get("message", "")
102
+ model_name = model_response.get("model")
103
+
104
+ # ๅค„็†ๅ›พ็‰‡
105
+ if images := model_response.get("generatedImageUrls"):
106
+ content = await GrokResponseProcessor._append_images(content, images, auth_token)
107
+
108
+ result = GrokResponseProcessor._build_response(content, model_name)
109
+ response_closed = True
110
+ response.close()
111
+ return result
112
+
113
+ raise GrokApiException("ๆ— ๅ“ๅบ”ๆ•ฐๆฎ", "NO_RESPONSE")
114
+
115
+ except orjson.JSONDecodeError as e:
116
+ logger.error(f"[Processor] JSON่งฃๆžๅคฑ่ดฅ: {e}")
117
+ raise GrokApiException(f"JSON่งฃๆžๅคฑ่ดฅ: {e}", "JSON_ERROR") from e
118
+ except Exception as e:
119
+ logger.error(f"[Processor] ๅค„็†้”™่ฏฏ: {type(e).__name__}: {e}")
120
+ raise GrokApiException(f"ๅ“ๅบ”ๅค„็†้”™่ฏฏ: {e}", "PROCESS_ERROR") from e
121
+ finally:
122
+ if not response_closed and hasattr(response, 'close'):
123
+ try:
124
+ response.close()
125
+ except Exception as e:
126
+ logger.warning(f"[Processor] ๅ…ณ้—ญๅ“ๅบ”ๅคฑ่ดฅ: {e}")
127
+
128
    @staticmethod
    async def process_stream(response, auth_token: str, session: Any = None) -> AsyncGenerator[str, None]:
        """Convert a Grok streaming response into OpenAI-style SSE chunks.

        Handles text/thinking tokens, web-search results, generated images
        (URL or base64 mode) and video-generation progress, and enforces
        first/gap/total timeouts. Owns closing both the response and the
        optional session in its ``finally`` block.
        """
        # State variables.
        is_image = False
        is_thinking = False
        thinking_finished = False
        model = None
        filtered_tags = setting.grok_config.get("filtered_tags", "").split(",")
        video_progress_started = False
        last_video_progress = -1
        response_closed = False
        show_thinking = setting.grok_config.get("show_thinking", True)

        # Timeout management.
        timeout_mgr = StreamTimeoutManager(
            chunk_timeout=setting.grok_config.get("stream_chunk_timeout", 120),
            first_timeout=setting.grok_config.get("stream_first_response_timeout", 30),
            total_timeout=setting.grok_config.get("stream_total_timeout", 600)
        )

        def make_chunk(content: str, finish: str = None):
            """Build one SSE "data:" line in OpenAI chunk format."""
            chunk_data = OpenAIChatCompletionChunkResponse(
                id=f"chatcmpl-{uuid.uuid4()}",
                created=int(time.time()),
                model=model or "grok-4-mini-thinking-tahoe",
                choices=[OpenAIChatCompletionChunkChoice(
                    index=0,
                    delta=OpenAIChatCompletionChunkMessage(
                        role="assistant",
                        content=content
                    ) if content else {},
                    finish_reason=finish
                )]
            )
            return f"data: {chunk_data.model_dump_json()}\n\n"

        try:
            async for chunk in response.aiter_lines():
                # Timeout check: emit a clean stop + DONE instead of hanging.
                is_timeout, timeout_msg = timeout_mgr.check_timeout()
                if is_timeout:
                    logger.warning(f"[Processor] {timeout_msg}")
                    yield make_chunk("", "stop")
                    yield "data: [DONE]\n\n"
                    return

                logger.debug(f"[Processor] 收到数据块: {len(chunk)} bytes")
                if not chunk:
                    continue

                try:
                    data = orjson.loads(chunk)

                    # Error check.
                    if error := data.get("error"):
                        error_msg = error.get('message', '未知错误')
                        logger.error(f"[Processor] API错误: {error_msg}")
                        yield make_chunk(f"Error: {error_msg}", "stop")
                        yield "data: [DONE]\n\n"
                        return

                    grok_resp = data.get("result", {}).get("response", {})
                    logger.debug(f"[Processor] 解析响应: {len(grok_resp)} bytes")
                    if not grok_resp:
                        continue

                    timeout_mgr.mark_received()

                    # Track the model name reported by the server.
                    if user_resp := grok_resp.get("userResponse"):
                        if m := user_resp.get("model"):
                            model = m

                    # Video handling.
                    if video_resp := grok_resp.get("streamingVideoGenerationResponse"):
                        progress = video_resp.get("progress", 0)
                        v_url = video_resp.get("videoUrl")

                        # Progress updates are wrapped in a <think> block.
                        if progress > last_video_progress:
                            last_video_progress = progress
                            if show_thinking:
                                if not video_progress_started:
                                    content = f"<think>视频已生成{progress}%\n"
                                    video_progress_started = True
                                elif progress < 100:
                                    content = f"视频已生成{progress}%\n"
                                else:
                                    content = f"视频已生成{progress}%</think>\n"
                                yield make_chunk(content)

                        # Final video URL.
                        if v_url:
                            logger.debug("[Processor] 视频生成完成")
                            video_content = await GrokResponseProcessor._build_video_content(v_url, auth_token)
                            yield make_chunk(video_content)

                        continue

                    # Image mode latches on once an attachment is announced.
                    if grok_resp.get("imageAttachmentInfo"):
                        is_image = True

                    token = grok_resp.get("token", "")

                    # Image handling.
                    if is_image:
                        if model_resp := grok_resp.get("modelResponse"):
                            image_mode = setting.global_config.get("image_mode", "url")
                            content = ""

                            for img in model_resp.get("generatedImageUrls", []):
                                try:
                                    if image_mode == "base64":
                                        # Base64 mode — stream large payloads in pieces.
                                        base64_str = await image_cache_service.download_base64(f"/{img}", auth_token)
                                        if base64_str:
                                            if not base64_str.startswith("data:"):
                                                parts = base64_str.split(",", 1)
                                                if len(parts) == 2:
                                                    yield make_chunk(f"![Generated Image](data:{parts[0]},")
                                                    # 8KB pieces.
                                                    for i in range(0, len(parts[1]), 8192):
                                                        yield make_chunk(parts[1][i:i+8192])
                                                    yield make_chunk(")\n")
                                                else:
                                                    yield make_chunk(f"![Generated Image]({base64_str})\n")
                                            else:
                                                yield make_chunk(f"![Generated Image]({base64_str})\n")
                                        else:
                                            yield make_chunk(f"![Generated Image](https://assets.grok.com/{img})\n")
                                    else:
                                        # URL mode — cache locally and rewrite to this app's path.
                                        await image_cache_service.download_image(f"/{img}", auth_token)
                                        img_path = img.replace('/', '-')
                                        base_url = setting.global_config.get("base_url", "")
                                        img_url = f"{base_url}/images/{img_path}" if base_url else f"/images/{img_path}"
                                        content += f"![Generated Image]({img_url})\n"
                                except Exception as e:
                                    logger.warning(f"[Processor] 处理图片失败: {e}")
                                    content += f"![Generated Image](https://assets.grok.com/{img})\n"

                            yield make_chunk(content.strip(), "stop")
                            return
                        elif token:
                            yield make_chunk(token)

                    # Conversation handling.
                    else:
                        if isinstance(token, list):
                            continue

                        if any(tag in token for tag in filtered_tags if token):
                            continue

                        current_is_thinking = grok_resp.get("isThinking", False)
                        message_tag = grok_resp.get("messageTag")

                        if thinking_finished and current_is_thinking:
                            continue

                        # Web-search results: render as Markdown links inside thinking.
                        if grok_resp.get("toolUsageCardId"):
                            if web_search := grok_resp.get("webSearchResults"):
                                if current_is_thinking:
                                    if show_thinking:
                                        for result in web_search.get("results", []):
                                            title = result.get("title", "")
                                            url = result.get("url", "")
                                            preview = result.get("preview", "")
                                            preview_clean = preview.replace("\n", "") if isinstance(preview, str) else ""
                                            token += f'\n- [{title}]({url} "{preview_clean}")'
                                        token += "\n"
                                    else:
                                        continue
                                else:
                                    continue
                            else:
                                continue

                        if token:
                            content = token

                            if message_tag == "header":
                                content = f"\n\n{token}\n\n"

                            # Thinking state transitions (open/close <think> tags).
                            should_skip = False
                            if not is_thinking and current_is_thinking:
                                if show_thinking:
                                    content = f"<think>\n{content}"
                                else:
                                    should_skip = True
                            elif is_thinking and not current_is_thinking:
                                if show_thinking:
                                    content = f"\n</think>\n{content}"
                                thinking_finished = True
                            elif current_is_thinking:
                                if not show_thinking:
                                    should_skip = True

                            if not should_skip:
                                yield make_chunk(content)

                            is_thinking = current_is_thinking

                except (orjson.JSONDecodeError, UnicodeDecodeError) as e:
                    logger.warning(f"[Processor] 解析失败: {e}")
                    continue
                except Exception as e:
                    logger.warning(f"[Processor] 处理出错: {e}")
                    continue

            yield make_chunk("", "stop")
            yield "data: [DONE]\n\n"
            logger.info(f"[Processor] 流式完成，耗时: {timeout_mgr.duration():.2f}秒")

        except Exception as e:
            logger.error(f"[Processor] 严重错误: {e}")
            yield make_chunk(f"处理错误: {e}", "error")
            yield "data: [DONE]\n\n"
        finally:
            # The generator owns both the response and the session.
            if not response_closed and hasattr(response, 'close'):
                try:
                    response.close()
                    logger.debug("[Processor] 响应已关闭")
                except Exception as e:
                    logger.warning(f"[Processor] 关闭失败: {e}")

            if session:
                try:
                    await session.close()
                    logger.debug("[Processor] 会话已关闭")
                except Exception as e:
                    logger.warning(f"[Processor] 关闭会话失败: {e}")
366
+
367
+ @staticmethod
368
+ async def _build_video_content(video_url: str, auth_token: str) -> str:
369
+ """ๆž„ๅปบ่ง†้ข‘ๅ†…ๅฎน"""
370
+ logger.debug(f"[Processor] ๆฃ€ๆต‹ๅˆฐ่ง†้ข‘: {video_url}")
371
+ full_url = f"https://assets.grok.com/{video_url}"
372
+
373
+ try:
374
+ cache_path = await video_cache_service.download_video(f"/{video_url}", auth_token)
375
+ if cache_path:
376
+ video_path = video_url.replace('/', '-')
377
+ base_url = setting.global_config.get("base_url", "")
378
+ local_url = f"{base_url}/images/{video_path}" if base_url else f"/images/{video_path}"
379
+ return f'<video src="{local_url}" controls="controls" width="500" height="300"></video>\n'
380
+ except Exception as e:
381
+ logger.warning(f"[Processor] ็ผ“ๅญ˜่ง†้ข‘ๅคฑ่ดฅ: {e}")
382
+
383
+ return f'<video src="{full_url}" controls="controls" width="500" height="300"></video>\n'
384
+
385
    @staticmethod
    async def _append_images(content: str, images: list, auth_token: str) -> str:
        """Append generated images to the reply as Markdown.

        Depending on the configured ``image_mode``, each image is embedded as
        a base64 data URI or served from the local cache; on any failure the
        remote assets.grok.com URL is used as a fallback.
        """
        image_mode = setting.global_config.get("image_mode", "url")

        for img in images:
            try:
                if image_mode == "base64":
                    base64_str = await image_cache_service.download_base64(f"/{img}", auth_token)
                    if base64_str:
                        content += f"\n![Generated Image]({base64_str})"
                    else:
                        content += f"\n![Generated Image](https://assets.grok.com/{img})"
                else:
                    cache_path = await image_cache_service.download_image(f"/{img}", auth_token)
                    if cache_path:
                        # Cached locally: rewrite the path so it is served by this app.
                        img_path = img.replace('/', '-')
                        base_url = setting.global_config.get("base_url", "")
                        img_url = f"{base_url}/images/{img_path}" if base_url else f"/images/{img_path}"
                        content += f"\n![Generated Image]({img_url})"
                    else:
                        content += f"\n![Generated Image](https://assets.grok.com/{img})"
            except Exception as e:
                logger.warning(f"[Processor] 处理图片失败: {e}")
                content += f"\n![Generated Image](https://assets.grok.com/{img})"

        return content
412
+
413
+ @staticmethod
414
+ def _build_response(content: str, model: str) -> OpenAIChatCompletionResponse:
415
+ """ๆž„ๅปบๅ“ๅบ”ๅฏน่ฑก"""
416
+ return OpenAIChatCompletionResponse(
417
+ id=f"chatcmpl-{uuid.uuid4()}",
418
+ object="chat.completion",
419
+ created=int(time.time()),
420
+ model=model,
421
+ choices=[OpenAIChatCompletionChoice(
422
+ index=0,
423
+ message=OpenAIChatCompletionMessage(
424
+ role="assistant",
425
+ content=content
426
+ ),
427
+ finish_reason="stop"
428
+ )],
429
+ usage=None
430
+ )
app/services/grok/statsig.py ADDED
@@ -0,0 +1,80 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Grok ่ฏทๆฑ‚ๅคด็ฎก็† - ็”ŸๆˆๅŠจๆ€่ฏทๆฑ‚ๅคดๅ’ŒStatsig ID"""
2
+
3
+ import base64
4
+ import random
5
+ import string
6
+ import uuid
7
+ from typing import Dict
8
+
9
+ from app.core.logger import logger
10
+ from app.core.config import setting
11
+
12
+
13
+ # ๅŸบ็ก€่ฏทๆฑ‚ๅคด
14
+ BASE_HEADERS = {
15
+ "Accept": "*/*",
16
+ "Accept-Language": "zh-CN,zh;q=0.9",
17
+ "Accept-Encoding": "gzip, deflate, br, zstd",
18
+ "Connection": "keep-alive",
19
+ "Origin": "https://grok.com",
20
+ "Priority": "u=1, i",
21
+ "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36",
22
+ "Sec-Ch-Ua": '"Not(A:Brand";v="99", "Google Chrome";v="133", "Chromium";v="133"',
23
+ "Sec-Ch-Ua-Mobile": "?0",
24
+ "Sec-Ch-Ua-Platform": '"macOS"',
25
+ "Sec-Fetch-Dest": "empty",
26
+ "Sec-Fetch-Mode": "cors",
27
+ "Sec-Fetch-Site": "same-origin",
28
+ "Baggage": "sentry-environment=production,sentry-public_key=b311e0f2690c81f25e2c4cf6d4f7ce1c",
29
+ }
30
+
31
+
32
+ def _random_string(length: int, letters_only: bool = True) -> str:
33
+ """็”Ÿๆˆ้šๆœบๅญ—็ฌฆไธฒ"""
34
+ chars = string.ascii_lowercase if letters_only else string.ascii_lowercase + string.digits
35
+ return ''.join(random.choices(chars, k=length))
36
+
37
+
38
+ def _generate_statsig_id() -> str:
39
+ """็”Ÿๆˆx-statsig-id
40
+
41
+ ้šๆœบ้€‰ๆ‹ฉไธค็งๆ ผๅผ๏ผš
42
+ 1. e:TypeError: Cannot read properties of null (reading 'children['xxxxx']')
43
+ 2. e:TypeError: Cannot read properties of undefined (reading 'xxxxxxxxxx')
44
+ """
45
+ if random.choice([True, False]):
46
+ rand = _random_string(5, letters_only=False)
47
+ msg = f"e:TypeError: Cannot read properties of null (reading 'children['{rand}']')"
48
+ else:
49
+ rand = _random_string(10)
50
+ msg = f"e:TypeError: Cannot read properties of undefined (reading '{rand}')"
51
+
52
+ return base64.b64encode(msg.encode()).decode()
53
+
54
+
55
def get_dynamic_headers(pathname: str = "/rest/app-chat/conversations/new") -> Dict[str, str]:
    """Build the full header set for a Grok request.

    Args:
        pathname: Request path; upload paths switch the Content-Type.

    Returns:
        Complete header dict including x-statsig-id and a fresh request id.
    """
    # Pick the statsig id: dynamically generated, or the configured fixed value.
    if setting.grok_config.get("dynamic_statsig", False):
        statsig_id = _generate_statsig_id()
        logger.debug(f"[Statsig] 动态生成: {statsig_id}")
    else:
        statsig_id = setting.grok_config.get("x_statsig_id")
        if not statsig_id:
            raise ValueError("配置文件中未设置 x_statsig_id")
        logger.debug(f"[Statsig] 使用固定值: {statsig_id}")

    # Assemble the headers on top of the shared base set.
    headers = dict(BASE_HEADERS)
    headers.update({
        "x-statsig-id": statsig_id,
        "x-xai-request-id": str(uuid.uuid4()),
        "Content-Type": "text/plain;charset=UTF-8" if "upload-file" in pathname else "application/json",
    })
    return headers
app/services/grok/token.py ADDED
@@ -0,0 +1,619 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Grok Token ็ฎก็†ๅ™จ - ๅ•ไพ‹ๆจกๅผ็š„Token่ดŸ่ฝฝๅ‡่กกๅ’Œ็Šถๆ€็ฎก็†"""
2
+
3
+ import orjson
4
+ import time
5
+ import asyncio
6
+ import aiofiles
7
+ import portalocker
8
+ from pathlib import Path
9
+ from curl_cffi.requests import AsyncSession
10
+ from typing import Dict, Any, Optional, Tuple
11
+
12
+ from app.models.grok_models import TokenType, Models
13
+ from app.core.exception import GrokApiException
14
+ from app.core.logger import logger
15
+ from app.core.config import setting
16
+ from app.services.grok.statsig import get_dynamic_headers
17
+
18
+
19
# Constants
RATE_LIMIT_API = "https://grok.com/rest/rate-limits"  # rate-limit query endpoint
TIMEOUT = 30            # request timeout (seconds)
BROWSER = "chrome133a"  # curl_cffi browser impersonation profile
MAX_FAILURES = 3        # consecutive failures before a token is marked expired
TOKEN_INVALID = 401     # HTTP status: the token itself is invalid
STATSIG_INVALID = 403   # HTTP status: the IP / statsig fingerprint is blocked

# Cooldown constants
COOLDOWN_REQUESTS = 5  # request-count cooldown after an ordinary failure
COOLDOWN_429_WITH_QUOTA = 3600  # 429 with quota left: cool down 1 hour (seconds)
COOLDOWN_429_NO_QUOTA = 36000  # 429 with no quota: cool down 10 hours (seconds)
31
+
32
+
33
class GrokTokenManager:
    """Token manager (singleton).

    Handles token selection/load-balancing across the normal and super
    pools, persistence (local file with inter-process locks, or a
    pluggable storage backend), failure tracking, cooldowns and bulk
    rate-limit refreshing.
    """

    _instance: Optional['GrokTokenManager'] = None
    _lock = asyncio.Lock()

    def __new__(cls) -> 'GrokTokenManager':
        # Classic singleton: always hand back the one shared instance.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        # __init__ runs on every GrokTokenManager() call; guard so the
        # shared instance is only initialised once.
        if hasattr(self, '_initialized'):
            return

        self.token_file = Path(__file__).parents[3] / "data" / "token.json"
        self._file_lock = asyncio.Lock()
        self.token_file.parent.mkdir(parents=True, exist_ok=True)
        self._storage = None
        self.token_data = None  # loaded lazily

        # Batched-save state
        self._save_pending = False  # whether there is unsaved data
        self._save_task = None      # background save task
        self._shutdown = False      # shutdown flag

        # Cooldown state
        self._cooldown_counts: Dict[str, int] = {}  # token -> remaining cooldown requests
        self._request_counter = 0  # global request counter

        # Refresh state
        self._refresh_lock = False  # refresh mutex flag
        self._refresh_progress: Dict[str, Any] = {"running": False, "current": 0, "total": 0, "success": 0, "failed": 0}

        self._initialized = True
        logger.debug(f"[Token] ๅˆๅง‹ๅŒ–ๅฎŒๆˆ: {self.token_file}")

    def set_storage(self, storage) -> None:
        """Install an external storage backend (replaces the local file)."""
        self._storage = storage

    async def _load_data(self) -> None:
        """Asynchronously load token data (multi-process safe)."""
        default = {TokenType.NORMAL.value: {}, TokenType.SUPER.value: {}}

        def load_sync():
            with open(self.token_file, "r", encoding="utf-8") as f:
                portalocker.lock(f, portalocker.LOCK_SH)  # shared read lock
                try:
                    return orjson.loads(f.read())
                finally:
                    portalocker.unlock(f)

        try:
            if self.token_file.exists():
                # Read under the inter-process lock off the event loop
                async with self._file_lock:
                    self.token_data = await asyncio.to_thread(load_sync)
            else:
                self.token_data = default
                logger.debug("[Token] ๅˆ›ๅปบๆ–ฐๆ•ฐๆฎๆ–‡ไปถ")
        except Exception as e:
            logger.error(f"[Token] ๅŠ ่ฝฝๅคฑ่ดฅ: {e}")
            self.token_data = default

    async def _save_data(self) -> None:
        """Persist token data (multi-process safe).

        Raises:
            GrokApiException: If the write fails.
        """
        def save_sync(data):
            with open(self.token_file, "w", encoding="utf-8") as f:
                portalocker.lock(f, portalocker.LOCK_EX)  # exclusive write lock
                try:
                    content = orjson.dumps(data, option=orjson.OPT_INDENT_2).decode()
                    f.write(content)
                    f.flush()
                finally:
                    portalocker.unlock(f)

        try:
            if not self._storage:
                async with self._file_lock:
                    await asyncio.to_thread(save_sync, self.token_data)
            else:
                await self._storage.save_tokens(self.token_data)
        except Exception as e:
            logger.error(f"[Token] ไฟๅญ˜ๅคฑ่ดฅ: {e}")
            raise GrokApiException(f"ไฟๅญ˜ๅคฑ่ดฅ: {e}", "TOKEN_SAVE_ERROR")

    def _mark_dirty(self) -> None:
        """Flag that there is data waiting to be saved by the batch worker."""
        self._save_pending = True

    async def _batch_save_worker(self) -> None:
        """Background task: periodically flush pending changes to storage."""
        from app.core.config import setting

        interval = setting.global_config.get("batch_save_interval", 1.0)
        logger.info(f"[Token] ๅญ˜ๅ‚จไปปๅŠกๅทฒๅฏๅŠจ๏ผŒ้—ด้š”: {interval}s")

        while not self._shutdown:
            await asyncio.sleep(interval)

            if self._save_pending and not self._shutdown:
                try:
                    await self._save_data()
                    self._save_pending = False
                    logger.debug("[Token] ๅญ˜ๅ‚จๅฎŒๆˆ")
                except Exception as e:
                    logger.error(f"[Token] ๅญ˜ๅ‚จๅคฑ่ดฅ: {e}")

    async def start_batch_save(self) -> None:
        """Start the batch-save background task (idempotent)."""
        if self._save_task is None:
            self._save_task = asyncio.create_task(self._batch_save_worker())
            logger.info("[Token] ๅญ˜ๅ‚จไปปๅŠกๅทฒๅˆ›ๅปบ")

    async def shutdown(self) -> None:
        """Stop the batch worker and flush any pending data."""
        self._shutdown = True

        if self._save_task:
            self._save_task.cancel()
            try:
                await self._save_task
            except asyncio.CancelledError:
                pass

        # Final flush
        if self._save_pending:
            await self._save_data()
            logger.info("[Token] ๅ…ณ้—ญๆ—ถๅˆทๆ–ฐๅฎŒๆˆ")

    @staticmethod
    def _extract_sso(auth_token: str) -> Optional[str]:
        """Extract the SSO value from a cookie-style auth string."""
        if "sso=" in auth_token:
            return auth_token.split("sso=")[1].split(";")[0]
        logger.warning("[Token] ๆ— ๆณ•ๆๅ–SSOๅ€ผ")
        return None

    def _find_token(self, sso: str) -> Tuple[Optional[str], Optional[Dict]]:
        """Locate a token in either pool; return (pool_name, entry) or (None, None)."""
        for token_type in [TokenType.NORMAL.value, TokenType.SUPER.value]:
            if sso in self.token_data[token_type]:
                return token_type, self.token_data[token_type][sso]
        return None, None

    async def add_token(self, tokens: list[str], token_type: TokenType) -> None:
        """Add tokens to the given pool, initialising their bookkeeping fields."""
        if not tokens:
            return

        count = 0
        for token in tokens:
            if not token or not token.strip():
                continue

            self.token_data[token_type.value][token] = {
                "createdTime": int(time.time() * 1000),
                "remainingQueries": -1,       # -1 means "not probed yet"
                "heavyremainingQueries": -1,  # -1 means "not probed yet"
                "status": "active",
                "failedCount": 0,
                "lastFailureTime": None,
                "lastFailureReason": None,
                "tags": [],
                "note": ""
            }
            count += 1

        self._mark_dirty()  # batched save
        logger.info(f"[Token] ๆทปๅŠ  {count} ไธช {token_type.value} Token")

    async def delete_token(self, tokens: list[str], token_type: TokenType) -> None:
        """Remove tokens from the given pool (missing tokens are ignored)."""
        if not tokens:
            return

        count = 0
        for token in tokens:
            if token in self.token_data[token_type.value]:
                del self.token_data[token_type.value][token]
                count += 1

        self._mark_dirty()  # batched save
        logger.info(f"[Token] ๅˆ ้™ค {count} ไธช {token_type.value} Token")

    async def update_token_tags(self, token: str, token_type: TokenType, tags: list[str]) -> None:
        """Replace a token's tag list.

        Raises:
            GrokApiException: If the token is not in the given pool.
        """
        if token not in self.token_data[token_type.value]:
            raise GrokApiException("Tokenไธๅญ˜ๅœจ", "TOKEN_NOT_FOUND", {"token": token[:10]})

        cleaned = [t.strip() for t in tags if t and t.strip()]
        self.token_data[token_type.value][token]["tags"] = cleaned
        self._mark_dirty()  # batched save
        logger.info(f"[Token] ๆ›ดๆ–ฐๆ ‡็ญพ: {token[:10]}... -> {cleaned}")

    async def update_token_note(self, token: str, token_type: TokenType, note: str) -> None:
        """Replace a token's free-form note.

        Raises:
            GrokApiException: If the token is not in the given pool.
        """
        if token not in self.token_data[token_type.value]:
            raise GrokApiException("Tokenไธๅญ˜ๅœจ", "TOKEN_NOT_FOUND", {"token": token[:10]})

        self.token_data[token_type.value][token]["note"] = note.strip()
        self._mark_dirty()  # batched save
        logger.info(f"[Token] ๆ›ดๆ–ฐๅค‡ๆณจ: {token[:10]}...")

    def get_tokens(self) -> Dict[str, Any]:
        """Return a shallow copy of all token data."""
        return self.token_data.copy()

    async def _reload_if_needed(self) -> None:
        """Reload data from disk in multi-process (file-backed) mode."""
        # Only reload in file mode; a storage backend is authoritative.
        if self._storage:
            return

        def reload_sync():
            with open(self.token_file, "r", encoding="utf-8") as f:
                portalocker.lock(f, portalocker.LOCK_SH)
                try:
                    return orjson.loads(f.read())
                finally:
                    portalocker.unlock(f)

        try:
            if self.token_file.exists():
                self.token_data = await asyncio.to_thread(reload_sync)
        except Exception as e:
            logger.warning(f"[Token] ้‡ๆ–ฐๅŠ ่ฝฝๅคฑ่ดฅ: {e}")

    async def get_token(self, model: str) -> str:
        """Select a token for *model* and format it as a cookie string."""
        jwt = await self.select_token(model)
        return f"sso-rw={jwt};sso={jwt}"

    async def select_token(self, model: str) -> str:
        """Pick the best available token (multi-process safe, cooldown-aware).

        Raises:
            GrokApiException: If no usable token exists for *model*.
        """
        # Pick up changes made by other processes (file mode)
        await self._reload_if_needed()

        # Tick down all request-count cooldowns
        self._request_counter += 1
        for token in list(self._cooldown_counts.keys()):
            self._cooldown_counts[token] -= 1
            if self._cooldown_counts[token] <= 0:
                del self._cooldown_counts[token]
                logger.debug(f"[Token] ๅ†ทๅด็ป“ๆŸ: {token[:10]}...")

        current_time = time.time() * 1000  # milliseconds

        def select_best(tokens: Dict[str, Any], field: str) -> Tuple[Optional[str], Optional[int]]:
            """Pick the best token: prefer unprobed (-1), else most quota left."""
            unused, used = [], []

            for key, data in tokens.items():
                # Skip expired tokens
                if data.get("status") == "expired":
                    continue

                # Skip tokens with too many failures (any error status)
                if data.get("failedCount", 0) >= MAX_FAILURES:
                    continue

                # Skip tokens in request-count cooldown
                if key in self._cooldown_counts:
                    continue

                # Skip tokens in time-based cooldown (429)
                cooldown_until = data.get("cooldownUntil", 0)
                if cooldown_until and cooldown_until > current_time:
                    continue

                remaining = int(data.get(field, -1))
                if remaining == 0:
                    continue

                if remaining == -1:
                    unused.append(key)
                elif remaining > 0:
                    used.append((key, remaining))

            if unused:
                return unused[0], -1
            if used:
                used.sort(key=lambda x: x[1], reverse=True)
                return used[0][0], used[0][1]
            return None, None

        # Snapshot so selection is consistent during iteration
        snapshot = {
            TokenType.NORMAL.value: self.token_data[TokenType.NORMAL.value].copy(),
            TokenType.SUPER.value: self.token_data[TokenType.SUPER.value].copy()
        }

        # Selection strategy: heavy model needs a super token; otherwise
        # try the normal pool first and fall back to super.
        if model == "grok-4-heavy":
            field = "heavyremainingQueries"
            token_key, remaining = select_best(snapshot[TokenType.SUPER.value], field)
        else:
            field = "remainingQueries"
            token_key, remaining = select_best(snapshot[TokenType.NORMAL.value], field)
            if token_key is None:
                token_key, remaining = select_best(snapshot[TokenType.SUPER.value], field)

        if token_key is None:
            raise GrokApiException(
                f"ๆฒกๆœ‰ๅฏ็”จToken: {model}",
                "NO_AVAILABLE_TOKEN",
                {
                    "model": model,
                    "normal": len(snapshot[TokenType.NORMAL.value]),
                    "super": len(snapshot[TokenType.SUPER.value]),
                    "cooldown_count": len(self._cooldown_counts)
                }
            )

        status = "ๆœชไฝฟ็”จ" if remaining == -1 else f"ๅ‰ฉไฝ™{remaining}ๆฌก"
        logger.debug(f"[Token] ๅˆ†้…Token: {model} ({status})")
        return token_key

    async def check_limits(self, auth_token: str, model: str) -> Optional[Dict[str, Any]]:
        """Query grok.com rate limits for *auth_token* and record the result.

        Returns:
            The parsed rate-limit payload, or None on failure.
        """
        try:
            rate_model = Models.to_rate_limit(model)
            payload = {"requestKind": "DEFAULT", "modelName": rate_model}

            cf = setting.grok_config.get("cf_clearance", "")
            headers = get_dynamic_headers("/rest/rate-limits")
            headers["Cookie"] = f"{auth_token};{cf}" if cf else auth_token

            # Outer retry: configurable status codes (401/429 etc.)
            retry_codes = setting.grok_config.get("retry_status_codes", [401, 429])
            MAX_OUTER_RETRY = 3

            for outer_retry in range(MAX_OUTER_RETRY + 1):  # +1 so we actually retry 3 times
                # Inner retry: 403 with proxy-pool rotation
                max_403_retries = 5
                retry_403_count = 0

                while retry_403_count <= max_403_retries:
                    # Async proxy lookup (proxy-pool aware)
                    from app.core.proxy_pool import proxy_pool

                    # On a 403 retry with an active proxy pool, force a fresh proxy
                    if retry_403_count > 0 and proxy_pool._enabled:
                        logger.info(f"[Token] 403้‡่ฏ• {retry_403_count}/{max_403_retries}๏ผŒๅˆทๆ–ฐไปฃ็†...")
                        proxy = await proxy_pool.force_refresh()
                    else:
                        proxy = await setting.get_proxy_async("service")

                    proxies = {"http": proxy, "https": proxy} if proxy else None

                    async with AsyncSession() as session:
                        response = await session.post(
                            RATE_LIMIT_API,
                            headers=headers,
                            json=payload,
                            impersonate=BROWSER,
                            timeout=TIMEOUT,
                            proxies=proxies
                        )

                        # Inner 403 retry: only when a proxy pool is active
                        if response.status_code == 403 and proxy_pool._enabled:
                            retry_403_count += 1

                            if retry_403_count <= max_403_retries:
                                logger.warning(f"[Token] ้‡ๅˆฐ403้”™่ฏฏ๏ผŒๆญฃๅœจ้‡่ฏ• ({retry_403_count}/{max_403_retries})...")
                                await asyncio.sleep(0.5)
                                continue

                            # All inner retries exhausted
                            logger.error(f"[Token] 403้”™่ฏฏ๏ผŒๅทฒ้‡่ฏ•{retry_403_count-1}ๆฌก๏ผŒๆ”พๅผƒ")
                            sso = self._extract_sso(auth_token)
                            if sso:
                                await self.record_failure(auth_token, 403, "ๆœๅŠกๅ™จ่ขซBlock")
                            # BUG FIX: previously fell through to the generic
                            # error branch below and recorded the failure twice.
                            return None

                        # Configurable status codes -> outer retry
                        if response.status_code in retry_codes:
                            if outer_retry < MAX_OUTER_RETRY:
                                delay = (outer_retry + 1) * 0.1  # progressive delay: 0.1s, 0.2s, 0.3s
                                logger.warning(f"[Token] ้‡ๅˆฐ{response.status_code}้”™่ฏฏ๏ผŒๅค–ๅฑ‚้‡่ฏ• ({outer_retry+1}/{MAX_OUTER_RETRY})๏ผŒ็ญ‰ๅพ…{delay}s...")
                                await asyncio.sleep(delay)
                                break  # leave the inner loop, go to the next outer attempt
                            else:
                                logger.error(f"[Token] {response.status_code}้”™่ฏฏ๏ผŒๅทฒ้‡่ฏ•{outer_retry}ๆฌก๏ผŒๆ”พๅผƒ")
                                sso = self._extract_sso(auth_token)
                                if sso:
                                    if response.status_code == 401:
                                        await self.record_failure(auth_token, 401, "Tokenๅคฑๆ•ˆ")
                                    else:
                                        await self.record_failure(auth_token, response.status_code, f"้”™่ฏฏ: {response.status_code}")
                                return None

                        if response.status_code == 200:
                            data = response.json()
                            sso = self._extract_sso(auth_token)

                            if outer_retry > 0 or retry_403_count > 0:
                                logger.info(f"[Token] ้‡่ฏ•ๆˆๅŠŸ๏ผ")

                            if sso:
                                if model == "grok-4-heavy":
                                    await self.update_limits(sso, normal=None, heavy=data.get("remainingQueries", -1))
                                    logger.info(f"[Token] ๆ›ดๆ–ฐ้™ๅˆถ: {sso[:10]}..., heavy={data.get('remainingQueries', -1)}")
                                else:
                                    await self.update_limits(sso, normal=data.get("remainingTokens", -1), heavy=None)
                                    logger.info(f"[Token] ๆ›ดๆ–ฐ้™ๅˆถ: {sso[:10]}..., basic={data.get('remainingTokens', -1)}")

                            return data
                        else:
                            # Any other error
                            logger.warning(f"[Token] ่Žทๅ–้™ๅˆถๅคฑ่ดฅ: {response.status_code}")
                            sso = self._extract_sso(auth_token)
                            if sso:
                                await self.record_failure(auth_token, response.status_code, f"้”™่ฏฏ: {response.status_code}")
                            return None

        except Exception as e:
            logger.error(f"[Token] ๆฃ€ๆŸฅ้™ๅˆถ้”™่ฏฏ: {e}")
            return None

    async def update_limits(self, sso: str, normal: Optional[int] = None, heavy: Optional[int] = None) -> None:
        """Update the stored remaining-quota counters for *sso* (None = leave as is)."""
        try:
            for token_type in [TokenType.NORMAL.value, TokenType.SUPER.value]:
                if sso in self.token_data[token_type]:
                    if normal is not None:
                        self.token_data[token_type][sso]["remainingQueries"] = normal
                    if heavy is not None:
                        self.token_data[token_type][sso]["heavyremainingQueries"] = heavy
                    self._mark_dirty()  # batched save
                    logger.info(f"[Token] ๆ›ดๆ–ฐ้™ๅˆถ: {sso[:10]}...")
                    return
            logger.warning(f"[Token] ๆœชๆ‰พๅˆฐ: {sso[:10]}...")
        except Exception as e:
            logger.error(f"[Token] ๆ›ดๆ–ฐ้™ๅˆถ้”™่ฏฏ: {e}")

    async def record_failure(self, auth_token: str, status: int, msg: str) -> None:
        """Record a failure against the token; mark it expired after MAX_FAILURES 4xx errors."""
        try:
            if status == STATSIG_INVALID:
                # 403 means the IP is blocked, not the token - do not penalise it
                logger.warning("[Token] IP่ขซBlock๏ผŒ่ฏท: 1.ๆ›ดๆขIP 2.ไฝฟ็”จไปฃ็† 3.้…็ฝฎCFๅ€ผ")
                return

            sso = self._extract_sso(auth_token)
            if not sso:
                return

            _, data = self._find_token(sso)
            if not data:
                logger.warning(f"[Token] ๆœชๆ‰พๅˆฐ: {sso[:10]}...")
                return

            data["failedCount"] = data.get("failedCount", 0) + 1
            data["lastFailureTime"] = int(time.time() * 1000)
            data["lastFailureReason"] = f"{status}: {msg}"

            logger.warning(
                f"[Token] ๅคฑ่ดฅ: {sso[:10]}... (็Šถๆ€:{status}), "
                f"ๆฌกๆ•ฐ: {data['failedCount']}/{MAX_FAILURES}, ๅŽŸๅ› : {msg}"
            )

            if 400 <= status < 500 and data["failedCount"] >= MAX_FAILURES:
                data["status"] = "expired"
                logger.error(f"[Token] ๆ ‡่ฎฐๅคฑๆ•ˆ: {sso[:10]}... (่ฟž็ปญ{status}้”™่ฏฏ{data['failedCount']}ๆฌก)")

            self._mark_dirty()  # batched save

        except Exception as e:
            logger.error(f"[Token] ่ฎฐๅฝ•ๅคฑ่ดฅ้”™่ฏฏ: {e}")

    async def reset_failure(self, auth_token: str) -> None:
        """Clear the failure counters for the token (called after a success)."""
        try:
            sso = self._extract_sso(auth_token)
            if not sso:
                return

            _, data = self._find_token(sso)
            if not data:
                return

            if data.get("failedCount", 0) > 0:
                data["failedCount"] = 0
                data["lastFailureTime"] = None
                data["lastFailureReason"] = None
                self._mark_dirty()  # batched save
                logger.info(f"[Token] ้‡็ฝฎๅคฑ่ดฅ่ฎกๆ•ฐ: {sso[:10]}...")

        except Exception as e:
            logger.error(f"[Token] ้‡็ฝฎๅคฑ่ดฅ้”™่ฏฏ: {e}")

    async def apply_cooldown(self, auth_token: str, status_code: int) -> None:
        """Apply a cooldown to the token.

        - 429: time-based cooldown (1h with quota left, 10h without).
        - Other errors: request-count cooldown (COOLDOWN_REQUESTS requests).
        """
        try:
            sso = self._extract_sso(auth_token)
            if not sso:
                return

            _, data = self._find_token(sso)
            if not data:
                return

            remaining = data.get("remainingQueries", -1)

            if status_code == 429:
                # 429: time-based cooldown
                if remaining > 0 or remaining == -1:
                    # Quota left (or unknown): cool down 1 hour
                    cooldown_until = time.time() + COOLDOWN_429_WITH_QUOTA
                    logger.info(f"[Token] 429ๅ†ทๅด(ๆœ‰้ขๅบฆ): {sso[:10]}... ๅ†ทๅด1ๅฐๆ—ถ")
                else:
                    # No quota: cool down 10 hours
                    cooldown_until = time.time() + COOLDOWN_429_NO_QUOTA
                    logger.info(f"[Token] 429ๅ†ทๅด(ๆ— ้ขๅบฆ): {sso[:10]}... ๅ†ทๅด10ๅฐๆ—ถ")
                data["cooldownUntil"] = int(cooldown_until * 1000)
                self._mark_dirty()
            else:
                # Other errors: request-count cooldown (only if quota remains)
                if remaining != 0:
                    self._cooldown_counts[sso] = COOLDOWN_REQUESTS
                    logger.info(f"[Token] ๆฌกๆ•ฐๅ†ทๅด: {sso[:10]}... ๅ†ทๅด{COOLDOWN_REQUESTS}ๆฌก่ฏทๆฑ‚")

        except Exception as e:
            logger.error(f"[Token] ๅบ”็”จๅ†ทๅด้”™่ฏฏ: {e}")

    async def refresh_all_limits(self) -> Dict[str, Any]:
        """Refresh remaining-quota counters for every token (one at a time)."""
        # Refuse to start a second concurrent refresh
        if self._refresh_lock:
            return {"error": "refresh_in_progress", "message": "ๅทฒๆœ‰ๅˆทๆ–ฐไปปๅŠกๅœจ่ฟ›่กŒไธญ", "progress": self._refresh_progress}

        # Take the lock
        self._refresh_lock = True

        try:
            # Collect every token from both pools
            all_tokens = []
            for token_type in [TokenType.NORMAL.value, TokenType.SUPER.value]:
                for sso in list(self.token_data[token_type].keys()):
                    all_tokens.append((token_type, sso))

            total = len(all_tokens)
            self._refresh_progress = {"running": True, "current": 0, "total": total, "success": 0, "failed": 0}

            success_count = 0
            fail_count = 0

            for i, (token_type, sso) in enumerate(all_tokens):
                auth_token = f"sso-rw={sso};sso={sso}"
                try:
                    result = await self.check_limits(auth_token, "grok-4-fast")
                    if result:
                        success_count += 1
                    else:
                        fail_count += 1
                except Exception as e:
                    logger.warning(f"[Token] ๅˆทๆ–ฐๅคฑ่ดฅ: {sso[:10]}... - {e}")
                    fail_count += 1

                # Publish progress for the UI
                self._refresh_progress = {
                    "running": True,
                    "current": i + 1,
                    "total": total,
                    "success": success_count,
                    "failed": fail_count
                }
                await asyncio.sleep(0.1)  # avoid hammering the API

            logger.info(f"[Token] ๆ‰น้‡ๅˆทๆ–ฐๅฎŒๆˆ: ๆˆๅŠŸ{success_count}, ๅคฑ่ดฅ{fail_count}")
            self._refresh_progress = {"running": False, "current": total, "total": total, "success": success_count, "failed": fail_count}
            return {"success": success_count, "failed": fail_count, "total": total}

        finally:
            self._refresh_lock = False

    def get_refresh_progress(self) -> Dict[str, Any]:
        """Return a copy of the current refresh progress."""
        return self._refresh_progress.copy()
616
+
617
+
618
# Global singleton instance shared by the whole application
token_manager = GrokTokenManager()
app/services/grok/upload.py ADDED
@@ -0,0 +1,209 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ๅ›พ็‰‡ไธŠไผ ็ฎก็†ๅ™จ - ๆ”ฏๆŒBase64ๅ’ŒURLๅ›พ็‰‡ไธŠไผ """
2
+
3
+ import asyncio
4
+ import base64
5
+ import re
6
+ from typing import Tuple, Optional
7
+ from urllib.parse import urlparse
8
+ from curl_cffi.requests import AsyncSession
9
+
10
+ from app.services.grok.statsig import get_dynamic_headers
11
+ from app.core.exception import GrokApiException
12
+ from app.core.config import setting
13
+ from app.core.logger import logger
14
+
15
+
16
# Constants
UPLOAD_API = "https://grok.com/rest/app-chat/upload-file"  # file-upload endpoint
TIMEOUT = 30            # request timeout (seconds)
BROWSER = "chrome133a"  # curl_cffi browser impersonation profile

# Extension -> MIME type map
# NOTE(review): not referenced anywhere in this module's visible code - verify before removing
MIME_TYPES = {
    '.jpg': 'image/jpeg', '.jpeg': 'image/jpeg', '.png': 'image/png',
    '.gif': 'image/gif', '.webp': 'image/webp', '.bmp': 'image/bmp',
}
DEFAULT_MIME = "image/jpeg"  # fallback MIME type
DEFAULT_EXT = "jpg"          # fallback file extension
28
+
29
+
30
class ImageUploadManager:
    """Image upload manager: accepts base64 data (with or without a
    data-URL prefix) or an http(s) URL and uploads it to grok.com."""

    @staticmethod
    async def upload(image_input: str, auth_token: str) -> Tuple[str, str]:
        """Upload an image (base64 string or URL).

        Args:
            image_input: Raw base64 payload, data URL, or http(s) image URL.
            auth_token: Cookie-style auth string ("sso-rw=...;sso=...").

        Returns:
            (file_id, file_uri) on success; ("", "") on failure.

        Raises:
            GrokApiException: If *auth_token* is empty.
        """
        try:
            # Normalise input to a raw base64 payload + filename/MIME
            if ImageUploadManager._is_url(image_input):
                buffer, mime = await ImageUploadManager._download(image_input)
                filename, _ = ImageUploadManager._get_info("", mime)
            else:
                buffer = image_input.split(",")[1] if "data:image" in image_input else image_input
                filename, mime = ImageUploadManager._get_info(image_input)

            # Request body
            data = {
                "fileName": filename,
                "fileMimeType": mime,
                "content": buffer,
            }

            if not auth_token:
                raise GrokApiException("่ฎค่ฏไปค็‰Œ็ผบๅคฑ", "NO_AUTH_TOKEN")

            # Outer retry: configurable status codes (401/429 etc.)
            retry_codes = setting.grok_config.get("retry_status_codes", [401, 429])
            MAX_OUTER_RETRY = 3

            for outer_retry in range(MAX_OUTER_RETRY + 1):  # +1 so we actually retry 3 times
                try:
                    # Inner retry: 403 with proxy-pool rotation
                    max_403_retries = 5
                    retry_403_count = 0

                    while retry_403_count <= max_403_retries:
                        # Request configuration
                        cf = setting.grok_config.get("cf_clearance", "")
                        headers = {
                            **get_dynamic_headers("/rest/app-chat/upload-file"),
                            "Cookie": f"{auth_token};{cf}" if cf else auth_token,
                        }

                        # Async proxy lookup (proxy-pool aware)
                        from app.core.proxy_pool import proxy_pool

                        # On a 403 retry with an active proxy pool, force a fresh proxy
                        if retry_403_count > 0 and proxy_pool._enabled:
                            logger.info(f"[Upload] 403้‡่ฏ• {retry_403_count}/{max_403_retries}๏ผŒๅˆทๆ–ฐไปฃ็†...")
                            proxy = await proxy_pool.force_refresh()
                        else:
                            proxy = await setting.get_proxy_async("service")

                        proxies = {"http": proxy, "https": proxy} if proxy else None

                        # Upload
                        async with AsyncSession() as session:
                            response = await session.post(
                                UPLOAD_API,
                                headers=headers,
                                json=data,
                                impersonate=BROWSER,
                                timeout=TIMEOUT,
                                proxies=proxies,
                            )

                            # Inner 403 retry: only when a proxy pool is active
                            if response.status_code == 403 and proxy_pool._enabled:
                                retry_403_count += 1

                                if retry_403_count <= max_403_retries:
                                    logger.warning(f"[Upload] ้‡ๅˆฐ403้”™่ฏฏ๏ผŒๆญฃๅœจ้‡่ฏ• ({retry_403_count}/{max_403_retries})...")
                                    await asyncio.sleep(0.5)
                                    continue

                                # All inner retries exhausted
                                logger.error(f"[Upload] 403้”™่ฏฏ๏ผŒๅทฒ้‡่ฏ•{retry_403_count-1}ๆฌก๏ผŒๆ”พๅผƒ")

                            # Configurable status codes -> outer retry
                            if response.status_code in retry_codes:
                                if outer_retry < MAX_OUTER_RETRY:
                                    delay = (outer_retry + 1) * 0.1  # progressive delay: 0.1s, 0.2s, 0.3s
                                    logger.warning(f"[Upload] ้‡ๅˆฐ{response.status_code}้”™่ฏฏ๏ผŒๅค–ๅฑ‚้‡่ฏ• ({outer_retry+1}/{MAX_OUTER_RETRY})๏ผŒ็ญ‰ๅพ…{delay}s...")
                                    await asyncio.sleep(delay)
                                    break  # leave the inner loop, go to the next outer attempt
                                else:
                                    logger.error(f"[Upload] {response.status_code}้”™่ฏฏ๏ผŒๅทฒ้‡่ฏ•{outer_retry}ๆฌก๏ผŒๆ”พๅผƒ")
                                    return "", ""

                            if response.status_code == 200:
                                result = response.json()
                                file_id = result.get("fileMetadataId", "")
                                file_uri = result.get("fileUri", "")

                                if outer_retry > 0 or retry_403_count > 0:
                                    logger.info(f"[Upload] ้‡่ฏ•ๆˆๅŠŸ๏ผ")

                                logger.debug(f"[Upload] ๆˆๅŠŸ๏ผŒID: {file_id}")
                                return file_id, file_uri

                            # Any other error: give up immediately
                            # BUG FIX: was response._status_code (AttributeError at runtime)
                            logger.error(f"[Upload] ๅคฑ่ดฅ๏ผŒ็Šถๆ€็ : {response.status_code}")
                            return "", ""
                    else:
                        # BUG FIX: this early return used to sit after the loop
                        # without `else`, so the outer-retry `break` hit it and
                        # outer retries never ran. `while/else` only fires when
                        # the loop exhausts without break (403 retries all failed).
                        return "", ""

                except Exception as e:
                    # Consistency fix: allow the same number of outer retries as
                    # the status-code path (was `MAX_OUTER_RETRY - 1`).
                    if outer_retry < MAX_OUTER_RETRY:
                        logger.warning(f"[Upload] ๅผ‚ๅธธ: {e}๏ผŒๅค–ๅฑ‚้‡่ฏ• ({outer_retry+1}/{MAX_OUTER_RETRY})...")
                        await asyncio.sleep(0.5)
                        continue

                    logger.warning(f"[Upload] ๅคฑ่ดฅ: {e}")
                    return "", ""

            return "", ""

        except Exception as e:
            logger.warning(f"[Upload] ๅคฑ่ดฅ: {e}")
            return "", ""

    @staticmethod
    def _is_url(input_str: str) -> bool:
        """Return True if *input_str* is an absolute http(s) URL."""
        try:
            result = urlparse(input_str)
            return all([result.scheme, result.netloc]) and result.scheme in ['http', 'https']
        except ValueError:  # was a bare except; urlparse raises ValueError on malformed input
            return False

    @staticmethod
    async def _download(url: str) -> Tuple[str, str]:
        """Download an image and base64-encode it.

        Returns:
            (base64_string, mime_type), or ("", "") on failure.
        """
        try:
            async with AsyncSession() as session:
                response = await session.get(url, timeout=5)
                response.raise_for_status()

                content_type = response.headers.get('content-type', DEFAULT_MIME)
                # Fall back to the default when the server lies about the type
                if not content_type.startswith('image/'):
                    content_type = DEFAULT_MIME

                b64 = base64.b64encode(response.content).decode()
                return b64, content_type
        except Exception as e:
            logger.warning(f"[Upload] ไธ‹่ฝฝๅคฑ่ดฅ: {e}")
            return "", ""

    @staticmethod
    def _get_info(image_data: str, mime_type: Optional[str] = None) -> Tuple[str, str]:
        """Derive a filename and MIME type for the payload.

        Args:
            image_data: Base64 payload (possibly a data URL); may be empty
                when *mime_type* is supplied.
            mime_type: Known MIME type (takes precedence when given).

        Returns:
            (file_name, mime_type) tuple.
        """
        # MIME type already known (e.g. from a download)
        if mime_type:
            ext = mime_type.split("/")[1] if "/" in mime_type else DEFAULT_EXT
            return f"image.{ext}", mime_type

        # Extract from a base64 data URL, else fall back to defaults
        mime = DEFAULT_MIME
        ext = DEFAULT_EXT

        if "data:image" in image_data:
            if match := re.search(r"data:([a-zA-Z0-9]+/[a-zA-Z0-9-.+]+);base64,", image_data):
                mime = match.group(1)
                ext = mime.split("/")[1]

        return f"image.{ext}", mime
app/services/mcp/__init__.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """MCPๆจกๅ—ๅˆๅง‹ๅŒ–"""
3
+
4
+ from app.services.mcp.server import mcp
5
+
6
+ __all__ = ["mcp"]
app/services/mcp/server.py ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """FastMCPๆœๅŠกๅ™จๅฎžไพ‹"""
3
+
4
+ from fastmcp import FastMCP
5
+ from fastmcp.server.auth.providers.jwt import StaticTokenVerifier
6
+ from app.services.mcp.tools import ask_grok_impl
7
+ from app.core.config import setting
8
+
9
+
10
+ def create_mcp_server() -> FastMCP:
11
+ """ๅˆ›ๅปบMCPๆœๅŠกๅ™จๅฎžไพ‹๏ผŒๅฆ‚ๆžœ้…็ฝฎไบ†APIๅฏ†้’ฅๅˆ™ๅฏ็”จ่ฎค่ฏ"""
12
+ # ๆฃ€ๆŸฅๆ˜ฏๅฆ้…็ฝฎไบ†APIๅฏ†้’ฅ
13
+ api_key = setting.grok_config.get("api_key")
14
+
15
+ # ๅฆ‚ๆžœ้…็ฝฎไบ†APIๅฏ†้’ฅ๏ผŒๅˆ™ๅฏ็”จ้™ๆ€token้ชŒ่ฏ
16
+ auth = None
17
+ if api_key:
18
+ auth = StaticTokenVerifier(
19
+ tokens={
20
+ api_key: {
21
+ "client_id": "grok2api-client",
22
+ "scopes": ["read", "write", "admin"]
23
+ }
24
+ },
25
+ required_scopes=["read"]
26
+ )
27
+
28
+ # ๅˆ›ๅปบFastMCPๅฎžไพ‹
29
+ return FastMCP(
30
+ name="Grok2API-MCP",
31
+ instructions="MCP server providing Grok AI chat capabilities. Use ask_grok tool to interact with Grok AI models.",
32
+ auth=auth
33
+ )
34
+
35
+
36
+ # ๅˆ›ๅปบๅ…จๅฑ€MCPๅฎžไพ‹
37
+ mcp = create_mcp_server()
38
+
39
+
40
+ # ๆณจๅ†Œask_grokๅทฅๅ…ท
41
+ @mcp.tool
42
+ async def ask_grok(
43
+ query: str,
44
+ model: str = "grok-3-fast",
45
+ system_prompt: str = None
46
+ ) -> str:
47
+ """
48
+ ่ฐƒ็”จGrok AI่ฟ›่กŒๅฏน่ฏ๏ผŒๅฐคๅ…ถ้€‚็”จไบŽๅฝ“็”จๆˆท่ฏข้—ฎๆœ€ๆ–ฐไฟกๆฏ๏ผŒ้œ€่ฆ่ฐƒ็”จๆœ็ดขๅŠŸ่ƒฝ๏ผŒๆˆ–ๆ˜ฏๆƒณไบ†่งฃ็คพไบคๅนณๅฐๅŠจๆ€๏ผˆๅฆ‚Twitter(X)ใ€Reddit็ญ‰๏ผ‰ๆ—ถใ€‚
49
+
50
+ Args:
51
+ query: ็”จๆˆท็š„้—ฎ้ข˜ๆˆ–ๆŒ‡ไปค
52
+ model: Grokๆจกๅž‹ๅ็งฐ,ๅฏ้€‰ๅ€ผ: grok-3-fast(้ป˜่ฎค), grok-4-fast, grok-4-fast-expert, grok-4-expert, grok-4-heavy
53
+ system_prompt: ๅฏ้€‰็š„็ณป็ปŸๆ็คบ่ฏ,็”จไบŽ่ฎพๅฎšAI็š„่ง’่‰ฒๆˆ–่กŒไธบ็บฆๆŸ
54
+
55
+ Returns:
56
+ Grok AI็š„ๅฎŒๆ•ดๅ›žๅคๅ†…ๅฎน,ๅฏ่ƒฝๅŒ…ๆ‹ฌๆ–‡ๆœฌๅ’Œๅ›พ็‰‡้“พๆŽฅ(Markdownๆ ผๅผ)
57
+
58
+ Examples:
59
+ - ็ฎ€ๅ•้—ฎ็ญ”: ask_grok("ไป€ไนˆๆ˜ฏPython?")
60
+ - ๆŒ‡ๅฎšๆจกๅž‹: ask_grok("่งฃ้‡Š้‡ๅญ่ฎก็ฎ—", model="grok-4-fast")
61
+ - ๅธฆ็ณป็ปŸๆ็คบ: ask_grok("ๅ†™ไธ€้ฆ–่ฏ—", system_prompt="ไฝ ๆ˜ฏไธ€ไฝๅคๅ…ธ่ฏ—ไบบ")
62
+ """
63
+ return await ask_grok_impl(query, model, system_prompt)
app/services/mcp/tools.py ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """MCP Tools - Grok AI ๅฏน่ฏๅทฅๅ…ท"""
3
+
4
+ import json
5
+ from typing import Optional
6
+ from app.services.grok.client import GrokClient
7
+ from app.core.logger import logger
8
+ from app.core.exception import GrokApiException
9
+
10
+
11
async def ask_grok_impl(
    query: str,
    model: str = "grok-3-fast",
    system_prompt: Optional[str] = None
) -> str:
    """
    Internal implementation: call the Grok API and collect the full
    streamed response into a single string.

    Args:
        query: The user's question.
        model: Grok model name.
        system_prompt: Optional system prompt.

    Returns:
        str: The complete Grok response content.

    Raises:
        Exception: If the upstream Grok call or stream processing fails
            (the original cause is attached via exception chaining).
    """
    try:
        # Build the OpenAI-style message list.
        messages = []
        if system_prompt:
            messages.append({"role": "system", "content": system_prompt})
        messages.append({"role": "user", "content": query})

        # Streaming request payload.
        request_data = {
            "model": model,
            "messages": messages,
            "stream": True
        }

        logger.info(f"[MCP] ask_grok ่ฐƒ็”จ, ๆจกๅž‹: {model}")

        # Invoke the Grok client in streaming mode.
        response_iterator = await GrokClient.openai_to_grok(request_data)

        # Accumulate every delta chunk from the SSE stream.
        content_parts = []
        async for chunk in response_iterator:
            if isinstance(chunk, bytes):
                chunk = chunk.decode('utf-8')

            # Parse SSE "data: ..." frames; stop at the [DONE] sentinel.
            if chunk.startswith("data: "):
                data_str = chunk[6:].strip()
                if data_str == "[DONE]":
                    break

                try:
                    data = json.loads(data_str)
                    choices = data.get("choices", [])
                    if choices:
                        delta = choices[0].get("delta", {})
                        if content := delta.get("content"):
                            content_parts.append(content)
                except json.JSONDecodeError:
                    # Ignore malformed / partial SSE payloads.
                    continue

        result = "".join(content_parts)
        logger.info(f"[MCP] ask_grok ๅฎŒๆˆ, ๅ“ๅบ”้•ฟๅบฆ: {len(result)}")
        return result

    except GrokApiException as e:
        logger.error(f"[MCP] Grok API้”™่ฏฏ: {str(e)}")
        # Chain the original exception so the root cause survives.
        raise Exception(f"Grok API่ฐƒ็”จๅคฑ่ดฅ: {str(e)}") from e
    except Exception as e:
        logger.error(f"[MCP] ask_grokๅผ‚ๅธธ: {str(e)}", exc_info=True)
        raise Exception(f"ๅค„็†่ฏทๆฑ‚ๆ—ถๅ‡บ้”™: {str(e)}") from e
app/services/request_logger.py ADDED
@@ -0,0 +1,143 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """่ฏทๆฑ‚ๆ—ฅๅฟ—ๅฎก่ฎก - ่ฎฐๅฝ•่ฟ‘ๆœŸ่ฏทๆฑ‚"""
2
+
3
+ import time
4
+ import asyncio
5
+ import orjson
6
+ from typing import List, Dict, Deque
7
+ from collections import deque
8
+ from dataclasses import dataclass, asdict
9
+ from pathlib import Path
10
+
11
+ from app.core.logger import logger
12
+
13
@dataclass
class RequestLog:
    """Schema of a single request-audit entry.

    NOTE(review): RequestLogger.add_log builds plain dicts with these
    same keys rather than instantiating this class — this dataclass
    documents the expected record shape.
    """
    id: str            # unique id (millisecond timestamp rendered as string)
    time: str          # human-readable local time, "%Y-%m-%d %H:%M:%S"
    timestamp: float   # epoch seconds (time.time())
    ip: str            # client IP address
    model: str         # model name used for the request
    duration: float    # request duration in seconds, rounded to 2 decimals
    status: int        # HTTP-style status code of the request
    key_name: str      # name/label of the API key used
    token_suffix: str  # trailing characters of the token, for identification
    error: str = ""    # error message, empty on success
25
+
26
class RequestLogger:
    """Request audit logger (singleton).

    Keeps the most recent request records (newest first) in a bounded
    deque and persists them to ``data/logs.json`` as a JSON array.
    Mutations are serialized with an asyncio lock; disk I/O runs in a
    worker thread via ``asyncio.to_thread``.
    """

    _instance = None

    def __new__(cls):
        # Singleton: every RequestLogger() call returns the same object.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self, max_len: int = 1000):
        # __init__ runs on every call; initialize shared state only once.
        if hasattr(self, '_initialized'):
            return

        self.file_path = Path(__file__).parents[2] / "data" / "logs.json"
        self._logs: Deque[Dict] = deque(maxlen=max_len)
        self._lock = asyncio.Lock()
        self._loaded = False
        # Strong references to in-flight background save tasks. The event
        # loop keeps only weak references to tasks, so without this a
        # fire-and-forget save task could be garbage-collected mid-write.
        self._save_tasks: set = set()

        self._initialized = True

    async def init(self):
        """Load persisted logs once, on first use."""
        if not self._loaded:
            await self._load_data()

    async def _load_data(self):
        """Load log entries from disk (idempotent)."""
        if self._loaded:
            return

        if not self.file_path.exists():
            self._loaded = True
            return

        try:
            async with self._lock:
                content = await asyncio.to_thread(self.file_path.read_bytes)
                if content:
                    data = orjson.loads(content)
                    if isinstance(data, list):
                        self._logs.clear()
                        self._logs.extend(data)
            self._loaded = True
            logger.debug(f"[Logger] ๅŠ ่ฝฝๆ—ฅๅฟ—ๆˆๅŠŸ: {len(self._logs)} ๆก")
        except Exception as e:
            logger.error(f"[Logger] ๅŠ ่ฝฝๆ—ฅๅฟ—ๅคฑ่ดฅ: {e}")
            # Mark as loaded anyway so a corrupt file cannot block logging.
            self._loaded = True

    async def _save_data(self):
        """Persist the current log buffer to disk as a JSON array."""
        if not self._loaded:
            return

        try:
            # Ensure the data directory exists before writing.
            self.file_path.parent.mkdir(parents=True, exist_ok=True)

            async with self._lock:
                content = orjson.dumps(list(self._logs))
                await asyncio.to_thread(self.file_path.write_bytes, content)
        except Exception as e:
            logger.error(f"[Logger] ไฟๅญ˜ๆ—ฅๅฟ—ๅคฑ่ดฅ: {e}")

    async def add_log(self,
                      ip: str,
                      model: str,
                      duration: float,
                      status: int,
                      key_name: str,
                      token_suffix: str = "",
                      error: str = ""):
        """Record one request; the newest entry is kept at the front."""
        if not self._loaded:
            await self.init()

        try:
            now = time.time()
            # Human-readable local timestamp for display in the admin UI.
            time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(now))

            entry = {
                "id": str(int(now * 1000)),  # millisecond timestamp as id
                "time": time_str,
                "timestamp": now,
                "ip": ip,
                "model": model,
                "duration": round(duration, 2),
                "status": status,
                "key_name": key_name,
                "token_suffix": token_suffix,
                "error": error
            }

            async with self._lock:
                self._logs.appendleft(entry)  # newest first

            # Persist asynchronously; hold a strong reference to the task
            # so it cannot be garbage-collected before completing.
            task = asyncio.create_task(self._save_data())
            self._save_tasks.add(task)
            task.add_done_callback(self._save_tasks.discard)

        except Exception as e:
            logger.error(f"[Logger] ่ฎฐๅฝ•ๆ—ฅๅฟ—ๅคฑ่ดฅ: {e}")

    async def get_logs(self, limit: int = 1000) -> List[Dict]:
        """Return up to ``limit`` most recent log entries (newest first)."""
        async with self._lock:
            return list(self._logs)[:limit]

    async def clear_logs(self):
        """Remove all log entries and persist the empty state."""
        async with self._lock:
            self._logs.clear()
        # Save outside the lock: _save_data re-acquires it and
        # asyncio.Lock is not reentrant.
        await self._save_data()


# Global singleton instance used across the app.
request_logger = RequestLogger()
app/services/request_stats.py ADDED
@@ -0,0 +1,205 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """่ฏทๆฑ‚็ปŸ่ฎกๆจกๅ— - ๆŒ‰ๅฐๆ—ถ/ๅคฉ็ปŸ่ฎก่ฏทๆฑ‚ๆ•ฐๆฎ"""
2
+
3
+ import time
4
+ import asyncio
5
+ import orjson
6
+ from datetime import datetime
7
+ from typing import Dict, Any
8
+ from pathlib import Path
9
+ from collections import defaultdict
10
+
11
+ from app.core.logger import logger
12
+
13
+
14
class RequestStats:
    """Request statistics manager (singleton).

    Aggregates request counts per hour, per day and per model, keeps a
    bounded retention window (48 hours / 30 days), and persists the
    counters to ``data/stats.json``.
    """

    _instance = None

    def __new__(cls):
        # Singleton: every RequestStats() call returns the same object.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        # __init__ runs on every call; initialize shared state only once.
        if hasattr(self, '_initialized'):
            return

        self.file_path = Path(__file__).parents[2] / "data" / "stats.json"

        # Counters: keyed by "%Y-%m-%dT%H" / "%Y-%m-%d" / model name.
        self._hourly: Dict[str, Dict[str, int]] = defaultdict(lambda: {"total": 0, "success": 0, "failed": 0})
        self._daily: Dict[str, Dict[str, int]] = defaultdict(lambda: {"total": 0, "success": 0, "failed": 0})
        self._models: Dict[str, int] = defaultdict(int)

        # Retention policy.
        self._hourly_keep = 48  # keep 48 hours
        self._daily_keep = 30   # keep 30 days

        self._lock = asyncio.Lock()
        self._loaded = False
        # Strong references to background save tasks (the event loop only
        # keeps weak references; without this a save could be GC'd mid-write).
        self._save_tasks: set = set()
        self._initialized = True

    async def init(self):
        """Load persisted counters once, on first use."""
        if not self._loaded:
            await self._load_data()

    async def _load_data(self):
        """Load statistics from disk (idempotent)."""
        if self._loaded:
            return

        if not self.file_path.exists():
            self._loaded = True
            return

        try:
            async with self._lock:
                content = await asyncio.to_thread(self.file_path.read_bytes)
                if content:
                    data = orjson.loads(content)

                    # Restore the defaultdict structure around plain dicts.
                    self._hourly = defaultdict(lambda: {"total": 0, "success": 0, "failed": 0})
                    self._hourly.update(data.get("hourly", {}))

                    self._daily = defaultdict(lambda: {"total": 0, "success": 0, "failed": 0})
                    self._daily.update(data.get("daily", {}))

                    self._models = defaultdict(int)
                    self._models.update(data.get("models", {}))

            self._loaded = True
            logger.debug(f"[Stats] ๅŠ ่ฝฝ็ปŸ่ฎกๆ•ฐๆฎๆˆๅŠŸ")
        except Exception as e:
            logger.error(f"[Stats] ๅŠ ่ฝฝๆ•ฐๆฎๅคฑ่ดฅ: {e}")
            # Mark as loaded anyway so a corrupt file cannot be re-read
            # and overwrite in-memory counters later.
            self._loaded = True

    async def _save_data(self):
        """Persist the counters to disk as plain dicts."""
        if not self._loaded:
            return

        try:
            # Ensure the data directory exists before writing.
            self.file_path.parent.mkdir(parents=True, exist_ok=True)

            async with self._lock:
                data = {
                    "hourly": dict(self._hourly),
                    "daily": dict(self._daily),
                    "models": dict(self._models)
                }
                content = orjson.dumps(data)
                await asyncio.to_thread(self.file_path.write_bytes, content)
        except Exception as e:
            logger.error(f"[Stats] ไฟๅญ˜ๆ•ฐๆฎๅคฑ่ดฅ: {e}")

    async def record_request(self, model: str, success: bool) -> None:
        """Record one request outcome into the hour/day/model counters."""
        if not self._loaded:
            await self.init()

        now = datetime.now()
        hour_key = now.strftime("%Y-%m-%dT%H")
        day_key = now.strftime("%Y-%m-%d")

        # Hourly bucket.
        self._hourly[hour_key]["total"] += 1
        if success:
            self._hourly[hour_key]["success"] += 1
        else:
            self._hourly[hour_key]["failed"] += 1

        # Daily bucket.
        self._daily[day_key]["total"] += 1
        if success:
            self._daily[day_key]["success"] += 1
        else:
            self._daily[day_key]["failed"] += 1

        # Per-model counter.
        self._models[model] += 1

        # Drop buckets that fell out of the retention window.
        self._cleanup()

        # Persist asynchronously; hold a strong reference to the task
        # so it cannot be garbage-collected before completing.
        task = asyncio.create_task(self._save_data())
        self._save_tasks.add(task)
        task.add_done_callback(self._save_tasks.discard)

    def _cleanup(self) -> None:
        """Evict hour/day buckets beyond the retention limits (oldest first)."""
        # Hourly buckets.
        hour_keys = list(self._hourly.keys())
        if len(hour_keys) > self._hourly_keep:
            for key in sorted(hour_keys)[:-self._hourly_keep]:
                del self._hourly[key]

        # Daily buckets.
        day_keys = list(self._daily.keys())
        if len(day_keys) > self._daily_keep:
            for key in sorted(day_keys)[:-self._daily_keep]:
                del self._daily[key]

    def get_stats(self, hours: int = 24, days: int = 7) -> Dict[str, Any]:
        """Return chart-ready statistics.

        Args:
            hours: How many trailing hour buckets to include.
            days: How many trailing day buckets to include.

        Returns:
            Dict with "hourly", "daily", "models" (top 10) series and a
            "summary" computed over the retained hourly window.
        """
        # Hoisted here once instead of re-importing inside each loop body.
        from datetime import timedelta

        now = datetime.now()

        # Last N hours, oldest to newest; missing buckets become zeros.
        hourly_data = []
        for i in range(hours - 1, -1, -1):
            dt = now - timedelta(hours=i)
            key = dt.strftime("%Y-%m-%dT%H")
            bucket = self._hourly.get(key, {"total": 0, "success": 0, "failed": 0})
            hourly_data.append({
                "hour": dt.strftime("%H:00"),
                "date": dt.strftime("%m-%d"),
                **bucket
            })

        # Last N days, oldest to newest.
        daily_data = []
        for i in range(days - 1, -1, -1):
            dt = now - timedelta(days=i)
            key = dt.strftime("%Y-%m-%d")
            bucket = self._daily.get(key, {"total": 0, "success": 0, "failed": 0})
            daily_data.append({
                "date": dt.strftime("%m-%d"),
                **bucket
            })

        # Model distribution (top 10 by count).
        model_data = sorted(self._models.items(), key=lambda x: x[1], reverse=True)[:10]

        # Totals over the retained hourly window (up to _hourly_keep hours).
        total_requests = sum(d["total"] for d in self._hourly.values())
        total_success = sum(d["success"] for d in self._hourly.values())
        total_failed = sum(d["failed"] for d in self._hourly.values())

        return {
            "hourly": hourly_data,
            "daily": daily_data,
            "models": [{"model": m, "count": c} for m, c in model_data],
            "summary": {
                "total": total_requests,
                "success": total_success,
                "failed": total_failed,
                "success_rate": round(total_success / total_requests * 100, 1) if total_requests > 0 else 0
            }
        }

    async def reset(self) -> None:
        """Clear all counters and persist the empty state."""
        self._hourly.clear()
        self._daily.clear()
        self._models.clear()
        await self._save_data()


# Global singleton instance used across the app.
request_stats = RequestStats()
app/template/admin.html ADDED
The diff for this file is too large to render. See raw diff
 
app/template/favicon.png ADDED

Git LFS Details

  • SHA256: d92973dce171d4f418c4c9a883cd754c6e9828a0f1f7cdf7af19d4896b852306
  • Pointer size: 131 Bytes
  • Size of remote file: 104 kB
app/template/login.html ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="zh-CN" class="h-full">
3
+
4
+ <head>
5
+ <meta charset="UTF-8">
6
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
7
+ <title>็™ปๅฝ• - Grok2API</title>
8
+ <link rel="icon" type="image/png" href="/static/favicon.png">
9
+ <script src="https://cdn.tailwindcss.com"></script>
10
+ <script>
11
+ tailwind.config = { theme: { extend: { colors: { border: "hsl(0 0% 89%)", input: "hsl(0 0% 89%)", ring: "hsl(0 0% 3.9%)", background: "hsl(0 0% 100%)", foreground: "hsl(0 0% 3.9%)", primary: { DEFAULT: "hsl(0 0% 9%)", foreground: "hsl(0 0% 98%)" }, secondary: { DEFAULT: "hsl(0 0% 96.1%)", foreground: "hsl(0 0% 9%)" }, muted: { DEFAULT: "hsl(0 0% 96.1%)", foreground: "hsl(0 0% 45.1%)" }, destructive: { DEFAULT: "hsl(0 84.2% 60.2%)", foreground: "hsl(0 0% 98%)" } } } } }
12
+ </script>
13
+ <style>
14
+ @keyframes slide-up {
15
+ from {
16
+ transform: translateY(100%);
17
+ opacity: 0
18
+ }
19
+
20
+ to {
21
+ transform: translateY(0);
22
+ opacity: 1
23
+ }
24
+ }
25
+
26
+ .animate-slide-up {
27
+ animation: slide-up .3s ease-out
28
+ }
29
+ </style>
30
+ </head>
31
+
32
+ <body class="h-full bg-background text-foreground antialiased">
33
+ <div class="flex min-h-full flex-col justify-center py-12 px-4 sm:px-6 lg:px-8">
34
+ <div class="sm:mx-auto sm:w-full sm:max-w-md">
35
+ <div class="text-center">
36
+ <h1 class="text-4xl font-bold">Grok2API</h1>
37
+ <p class="mt-2 text-sm text-muted-foreground">็ฎก็†ๅ‘˜ๆŽงๅˆถๅฐ</p>
38
+ </div>
39
+ </div>
40
+
41
+ <div class="sm:mx-auto sm:w-full sm:max-w-md">
42
+ <div class="bg-background py-8 px-4 sm:px-10 rounded-lg">
43
+ <form id="loginForm" class="space-y-6">
44
+ <div class="space-y-2">
45
+ <label for="username" class="text-sm font-medium">่ดฆๆˆท</label>
46
+ <input type="text" id="username" name="username" required
47
+ class="flex h-10 w-full rounded-md border border-input bg-background px-3 py-2 text-sm placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring disabled:opacity-50"
48
+ placeholder="่ฏท่พ“ๅ…ฅ่ดฆๆˆท">
49
+ </div>
50
+ <div class="space-y-2">
51
+ <label for="password" class="text-sm font-medium">ๅฏ†็ </label>
52
+ <input type="password" id="password" name="password" required
53
+ class="flex h-10 w-full rounded-md border border-input bg-background px-3 py-2 text-sm placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring disabled:opacity-50"
54
+ placeholder="่ฏท่พ“ๅ…ฅๅฏ†็ ">
55
+ </div>
56
+ <button type="submit" id="loginButton"
57
+ class="inline-flex items-center justify-center rounded-md font-medium transition-colors bg-primary text-primary-foreground hover:bg-primary/90 h-10 w-full disabled:opacity-50">็™ปๅฝ•</button>
58
+ </form>
59
+
60
+ <div class="mt-6 text-center text-xs text-muted-foreground space-y-1">
61
+ <p>Created By Chenyme ยฉ 2025</p>
62
+ <p>Fork ็ปดๆŠค: @Tomiya233</p>
63
+ </div>
64
+ </div>
65
+ </div>
66
+ </div>
67
+
68
+ <script>
69
+ const form = document.getElementById('loginForm'), btn = document.getElementById('loginButton');
70
+ form.addEventListener('submit', async (e) => { e.preventDefault(); btn.disabled = true; btn.textContent = '็™ปๅฝ•ไธญ...'; try { const fd = new FormData(form), r = await fetch('/api/login', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ username: fd.get('username'), password: fd.get('password') }) }); const d = await r.json(); d.success ? (localStorage.setItem('adminToken', d.token), location.href = '/manage') : showToast(d.message || '็™ปๅฝ•ๅคฑ่ดฅ', 'error') } catch (e) { showToast('็ฝ‘็ปœ้”™่ฏฏ๏ผŒ่ฏท็จๅŽ้‡่ฏ•', 'error') } finally { btn.disabled = false; btn.textContent = '็™ปๅฝ•' } });
71
+ function showToast(m, t = 'error') { const d = document.createElement('div'), bc = { success: 'bg-green-600', error: 'bg-destructive', info: 'bg-primary' }; d.className = `fixed bottom-4 right-4 ${bc[t] || bc.error} text-white px-4 py-2.5 rounded-lg shadow-lg text-sm font-medium z-50 animate-slide-up`; d.textContent = m; document.body.appendChild(d); setTimeout(() => { d.style.opacity = '0'; d.style.transition = 'opacity .3s'; setTimeout(() => d.parentNode && document.body.removeChild(d), 300) }, 2000) }
72
+ window.addEventListener('DOMContentLoaded', () => { const t = localStorage.getItem('adminToken'); t && fetch('/api/stats', { headers: { Authorization: `Bearer ${t}` } }).then(r => { if (r.ok) location.href = '/manage' }) });
73
+ </script>
74
+ </body>
75
+
76
+ </html>
data/setting.toml ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [grok]
2
+ api_key = ""
3
+ proxy_url = ""
4
+ cache_proxy_url = ""
5
+ cf_clearance = ""
6
+ x_statsig_id = "ZTpUeXBlRXJyb3I6IENhbm5vdCByZWFkIHByb3BlcnRpZXMgb2YgdW5kZWZpbmVkIChyZWFkaW5nICdjaGlsZE5vZGVzJyk="
7
+ filtered_tags = "xaiartifact,xai:tool_usage_card,grok:render"
8
+ stream_chunk_timeout = 120
9
+ stream_total_timeout = 600
10
+ stream_first_response_timeout = 30
11
+ temporary = true
12
+ show_thinking = true
13
+ dynamic_statsig = true
14
+ proxy_pool_url = ""
15
+ proxy_pool_interval = 300
16
+ retry_status_codes = [ 401, 429,]
17
+
18
+ [global]
19
+ base_url = ""
20
+ log_level = "INFO"
21
+ image_mode = "url"
22
+ admin_password = "admin"
23
+ admin_username = "admin"
24
+ image_cache_max_size_mb = 512
25
+ video_cache_max_size_mb = 1024
data/temp/image.temp ADDED
File without changes
data/temp/video/users-8522ce45-679b-4e0e-a0f7-bb18f434eb6b-generated-15f7113f-5d16-4ff1-bdaa-a2eabd66671c-generated_video.mp4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9fc94cc9cb116f2e561baa62d006d5a8c96a30dc7671acbcbcf7f4b6de11696a
3
+ size 313620
data/token.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ {
2
+ "ssoSuper": {},
3
+ "ssoNormal": {}
4
+ }
docker-compose.yml ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ services:
2
+ grok2api:
3
+ image: ghcr.io/chenyme/grok2api:latest
4
+ ports:
5
+ - "8000:8000"
6
+ volumes:
7
+ - grok_data:/app/data
8
+ - ./logs:/app/logs
9
+ environment:
10
+ # ===== ๅญ˜ๅ‚จๆจกๅผ =====
11
+ # ๆ”ฏๆŒ file, mysql ๆˆ– redis
12
+ - STORAGE_MODE=file
13
+
14
+ # ===== ๆ•ฐๆฎๅบ“ =====
15
+ # ไป…ๅœจSTORAGE_MODE=mysqlๆˆ–redisๆ—ถ้œ€่ฆ
16
+ # - DATABASE_URL=mysql://user:password@host:3306/grok2api
17
+ # MySQLๆ ผๅผ: mysql://user:password@host:port/database
18
+ # Redisๆ ผๅผ: redis://host:port/db ๆˆ– redis://user:password@host:port/db
19
+
20
+ # ===== Workerๆ•ฐ้‡ =====
21
+ # ้ป˜่ฎค1๏ผŒๅปบ่ฎฎ๏ผšCPUๆ ธๅฟƒๆ•ฐ * 2๏ผŒๅคš่ฟ›็จ‹ๆจกๅผไธ‹ๅปบ่ฎฎไฝฟ็”จMySQL/Redisๅญ˜ๅ‚จ
22
+ - WORKERS=1
23
+
24
+ volumes:
25
+ grok_data:
docker-entrypoint.sh ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/bin/sh
2
+ set -e
3
+
4
+ # ๅˆๅง‹ๅŒ–้…็ฝฎๆ–‡ไปถ๏ผˆๅฆ‚ๆžœไธๅญ˜ๅœจ๏ผ‰
5
+ echo "[Grok2API] ๆฃ€ๆŸฅ้…็ฝฎๆ–‡ไปถ..."
6
+
7
+ # ็กฎไฟๆ•ฐๆฎ็›ฎๅฝ•ๅญ˜ๅœจ
8
+ mkdir -p /app/data/temp/image /app/data/temp/video /app/logs
9
+
10
+ # ๅฆ‚ๆžœ setting.toml ไธๅญ˜ๅœจ๏ผŒๅˆ›ๅปบ้ป˜่ฎค้…็ฝฎ
11
+ if [ ! -f /app/data/setting.toml ]; then
12
+ echo "[Grok2API] ๅˆๅง‹ๅŒ– setting.toml..."
13
+ cat > /app/data/setting.toml << 'EOF'
14
+ [global]
15
+ base_url = "http://localhost:8000"
16
+ log_level = "INFO"
17
+ image_mode = "url"
18
+ admin_password = "admin"
19
+ admin_username = "admin"
20
+ image_cache_max_size_mb = 512
21
+ video_cache_max_size_mb = 1024
22
+ max_upload_concurrency = 20
23
+ max_request_concurrency = 50
24
+ batch_save_interval = 1.0
25
+ batch_save_threshold = 10
26
+
27
+ [grok]
28
+ api_key = ""
29
+ proxy_url = ""
30
+ cache_proxy_url = ""
31
+ cf_clearance = ""
32
+ x_statsig_id = ""
33
+ dynamic_statsig = true
34
+ filtered_tags = "xaiartifact,xai:tool_usage_card,grok:render"
35
+ stream_chunk_timeout = 120
36
+ stream_total_timeout = 600
37
+ stream_first_response_timeout = 30
38
+ temporary = true
39
+ show_thinking = true
40
+ proxy_pool_url = ""
41
+ proxy_pool_interval = 300
42
+ retry_status_codes = [401, 429]
43
+ EOF
44
+ fi
45
+
46
+ # ๅฆ‚ๆžœ token.json ไธๅญ˜ๅœจ๏ผŒๅˆ›ๅปบ็ฉบtokenๆ–‡ไปถ
47
+ if [ ! -f /app/data/token.json ]; then
48
+ echo "[Grok2API] ๅˆๅง‹ๅŒ– token.json..."
49
+ echo '{"ssoNormal": {}, "ssoSuper": {}}' > /app/data/token.json
50
+ fi
51
+
52
+ echo "[Grok2API] ้…็ฝฎๆ–‡ไปถๆฃ€ๆŸฅๅฎŒๆˆ"
53
+ echo "[Grok2API] ๅฏๅŠจๅบ”็”จ..."
54
+
55
+ # ๆ‰ง่กŒไผ ๅ…ฅ็š„ๅ‘ฝไปค
56
+ exec "$@"
main.py ADDED
@@ -0,0 +1,187 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Grok2API"""
2
+
3
+ import os
4
+ import sys
5
+ from contextlib import asynccontextmanager
6
+ from pathlib import Path
7
+
8
+ from fastapi import FastAPI
9
+ from fastapi.middleware.cors import CORSMiddleware
10
+ from fastapi.staticfiles import StaticFiles
11
+ from app.core.logger import logger
12
+ from app.core.exception import register_exception_handlers
13
+ from app.core.storage import storage_manager
14
+ from app.core.config import setting
15
+ from app.services.grok.token import token_manager
16
+ from app.api.v1.chat import router as chat_router
17
+ from app.api.v1.models import router as models_router
18
+ from app.api.v1.images import router as images_router
19
+ from app.api.admin.manage import router as admin_router
20
+ from app.services.mcp import mcp
21
+
22
+ # 0. ๅ…ผๅฎนๆ€งๆฃ€ๆต‹
23
+ try:
24
+ if sys.platform != 'win32':
25
+ import uvloop
26
+ uvloop.install()
27
+ logger.info("[Grok2API] ๅฏ็”จuvloop้ซ˜ๆ€ง่ƒฝไบ‹ไปถๅพช็Žฏ")
28
+ else:
29
+ logger.info("[Grok2API] Windows็ณป็ปŸ๏ผŒไฝฟ็”จ้ป˜่ฎคasyncioไบ‹ไปถๅพช็Žฏ")
30
+ except ImportError:
31
+ logger.info("[Grok2API] uvloopๆœชๅฎ‰่ฃ…๏ผŒไฝฟ็”จ้ป˜่ฎคasyncioไบ‹ไปถๅพช็Žฏ")
32
+
33
+ # 1. ๅˆ›ๅปบMCP็š„FastAPIๅบ”็”จๅฎžไพ‹
34
+ mcp_app = mcp.http_app(stateless_http=True, transport="streamable-http")
35
+
36
+ # 2. ๅฎšไน‰ๅบ”็”จ็”Ÿๅ‘ฝๅ‘จๆœŸ
37
+ @asynccontextmanager
38
+ async def lifespan(app: FastAPI):
39
+ """
40
+ ๅฏๅŠจ้กบๅบ:
41
+ 1. ๅˆๅง‹ๅŒ–ๆ ธๅฟƒๆœๅŠก (storage, settings, token_manager)
42
+ 2. ๅผ‚ๆญฅๅŠ ่ฝฝ token ๆ•ฐๆฎ
43
+ 3. ๅฏๅŠจๆ‰น้‡ไฟๅญ˜ไปปๅŠก
44
+ 4. ๅฏๅŠจMCPๆœๅŠก็”Ÿๅ‘ฝๅ‘จๆœŸ
45
+
46
+ ๅ…ณ้—ญ้กบๅบ (LIFO):
47
+ 1. ๅ…ณ้—ญMCPๆœๅŠก็”Ÿๅ‘ฝๅ‘จๆœŸ
48
+ 2. ๅ…ณ้—ญๆ‰น้‡ไฟๅญ˜ไปปๅŠกๅนถๅˆทๆ–ฐๆ•ฐๆฎ
49
+ 3. ๅ…ณ้—ญๆ ธๅฟƒๆœๅŠก
50
+ """
51
+ # --- ๅฏๅŠจ่ฟ‡็จ‹ ---
52
+ # 1. ๅˆๅง‹ๅŒ–ๆ ธๅฟƒๆœๅŠก
53
+ await storage_manager.init()
54
+
55
+ # ่ฎพ็ฝฎๅญ˜ๅ‚จๅˆฐ้…็ฝฎๅ’Œtoken็ฎก็†ๅ™จ
56
+ storage = storage_manager.get_storage()
57
+ setting.set_storage(storage)
58
+ token_manager.set_storage(storage)
59
+
60
+ # 2. ้‡ๆ–ฐๅŠ ่ฝฝ้…็ฝฎ
61
+ await setting.reload()
62
+ logger.info("[Grok2API] ๆ ธๅฟƒๆœๅŠกๅˆๅง‹ๅŒ–ๅฎŒๆˆ")
63
+
64
+ # 2.5. ๅˆๅง‹ๅŒ–ไปฃ็†ๆฑ 
65
+ from app.core.proxy_pool import proxy_pool
66
+ proxy_url = setting.grok_config.get("proxy_url", "")
67
+ proxy_pool_url = setting.grok_config.get("proxy_pool_url", "")
68
+ proxy_pool_interval = setting.grok_config.get("proxy_pool_interval", 300)
69
+ proxy_pool.configure(proxy_url, proxy_pool_url, proxy_pool_interval)
70
+
71
+ # 3. ๅผ‚ๆญฅๅŠ ่ฝฝ token ๆ•ฐๆฎ
72
+ await token_manager._load_data()
73
+ logger.info("[Grok2API] Tokenๆ•ฐๆฎๅŠ ่ฝฝๅฎŒๆˆ")
74
+
75
+ # 3.5. ๅŠ ่ฝฝ API Key ๆ•ฐๆฎ
76
+ from app.services.api_keys import api_key_manager
77
+ await api_key_manager.init()
78
+ logger.info("[Grok2API] API Keyๆ•ฐๆฎๅŠ ่ฝฝๅฎŒๆˆ")
79
+
80
+ # 3.6. ๅŠ ่ฝฝ็ปŸ่ฎกๅ’Œๆ—ฅๅฟ—ๆ•ฐๆฎ
81
+ from app.services.request_stats import request_stats
82
+ from app.services.request_logger import request_logger
83
+ await request_stats.init()
84
+ await request_logger.init()
85
+ logger.info("[Grok2API] ็ปŸ่ฎกๅ’Œๆ—ฅๅฟ—ๆ•ฐๆฎๅŠ ่ฝฝๅฎŒๆˆ")
86
+
87
+ # 4. ๅฏๅŠจๆ‰น้‡ไฟๅญ˜ไปปๅŠก
88
+ await token_manager.start_batch_save()
89
+
90
+ # 5. ็ฎก็†MCPๆœๅŠก็š„็”Ÿๅ‘ฝๅ‘จๆœŸ
91
+ mcp_lifespan_context = mcp_app.lifespan(app)
92
+ await mcp_lifespan_context.__aenter__()
93
+ logger.info("[MCP] MCPๆœๅŠกๅˆๅง‹ๅŒ–ๅฎŒๆˆ")
94
+
95
+ logger.info("[Grok2API] ๅบ”็”จๅฏๅŠจๆˆๅŠŸ")
96
+
97
+ try:
98
+ yield
99
+ finally:
100
+ # --- ๅ…ณ้—ญ่ฟ‡็จ‹ ---
101
+ # 1. ้€€ๅ‡บMCPๆœๅŠก็š„็”Ÿๅ‘ฝๅ‘จๆœŸ
102
+ await mcp_lifespan_context.__aexit__(None, None, None)
103
+ logger.info("[MCP] MCPๆœๅŠกๅทฒๅ…ณ้—ญ")
104
+
105
+ # 2. ๅ…ณ้—ญๆ‰น้‡ไฟๅญ˜ไปปๅŠกๅนถๅˆทๆ–ฐๆ•ฐๆฎ
106
+ await token_manager.shutdown()
107
+ logger.info("[Token] Token็ฎก็†ๅ™จๅทฒๅ…ณ้—ญ")
108
+
109
+ # 3. ๅ…ณ้—ญๆ ธๅฟƒๆœๅŠก
110
+ await storage_manager.close()
111
+ logger.info("[Grok2API] ๅบ”็”จๅ…ณ้—ญๆˆๅŠŸ")
112
+
113
+
114
+ # ๅˆๅง‹ๅŒ–ๆ—ฅๅฟ—
115
+ logger.info("[Grok2API] ๅบ”็”จๆญฃๅœจๅฏๅŠจ...")
116
+ logger.info("[Grok2API] Fork ็‰ˆๆœฌ็ปดๆŠค: @Tomiya233")
117
+
118
+ # ๅˆ›ๅปบFastAPIๅบ”็”จ
119
+ app = FastAPI(
120
+ title="Grok2API",
121
+ description="Grok API ่ฝฌๆขๆœๅŠก",
122
+ version="1.3.1",
123
+ lifespan=lifespan
124
+ )
125
+
126
+ # ๆณจๅ†Œๅ…จๅฑ€ๅผ‚ๅธธๅค„็†ๅ™จ
127
+ register_exception_handlers(app)
128
+
129
+ # ๆณจๅ†Œ่ทฏ็”ฑ
130
+ app.include_router(chat_router, prefix="/v1")
131
+ app.include_router(models_router, prefix="/v1")
132
+ app.include_router(images_router)
133
+ app.include_router(admin_router)
134
+
135
+ # ๆŒ‚่ฝฝ้™ๆ€ๆ–‡ไปถ
136
+ app.mount("/static", StaticFiles(directory="app/template"), name="template")
137
+
138
+ @app.get("/")
139
+ async def root():
140
+ """ๆ น่ทฏๅพ„"""
141
+ from fastapi.responses import RedirectResponse
142
+ return RedirectResponse(url="/login")
143
+
144
+
145
+ @app.get("/health")
146
+ async def health_check():
147
+ """ๅฅๅบทๆฃ€ๆŸฅๆŽฅๅฃ"""
148
+ return {
149
+ "status": "healthy",
150
+ "service": "Grok2API",
151
+ "version": "1.0.3"
152
+ }
153
+
154
+ # ๆŒ‚่ฝฝMCPๆœๅŠกๅ™จ
155
+ app.mount("", mcp_app)
156
+
157
+
158
+ if __name__ == "__main__":
159
+ import uvicorn
160
+ import os
161
+
162
+ # ่ฏปๅ– worker ๆ•ฐ้‡๏ผŒ้ป˜่ฎคไธบ 1
163
+ workers = int(os.getenv("WORKERS", "1"))
164
+
165
+ # ๆ็คบๅคš่ฟ›็จ‹ๆจกๅผ
166
+ if workers > 1:
167
+ logger.info(
168
+ f"[Grok2API] ๅคš่ฟ›็จ‹ๆจกๅผๅทฒๅฏ็”จ (workers={workers})ใ€‚"
169
+ f"ๅปบ่ฎฎไฝฟ็”จ Redis/MySQL ๅญ˜ๅ‚จไปฅ่Žทๅพ—ๆœ€ไฝณๆ€ง่ƒฝใ€‚"
170
+ )
171
+
172
+ # ็กฎๅฎšไบ‹ไปถๅพช็Žฏ็ฑปๅž‹
173
+ loop_type = "auto"
174
+ if workers == 1 and sys.platform != 'win32':
175
+ try:
176
+ import uvloop
177
+ loop_type = "uvloop"
178
+ except ImportError:
179
+ pass
180
+
181
+ uvicorn.run(
182
+ "main:app",
183
+ host="0.0.0.0",
184
+ port=8000,
185
+ workers=workers,
186
+ loop=loop_type
187
+ )
pyproject.toml ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ name = "grok2api"
3
+ version = "1.4.3"
4
+ description = "ๅŸบไบŽ FastAPI ้‡ๆž„็š„ Grok2API๏ผŒ้€‚้…ๆœ€ๆ–ฐ็š„Web่ฐƒ็”จๆ ผๅผ๏ผŒๆ”ฏๆŒๆตๅผๅฏน่ฏใ€ๅ›พๅƒ็”Ÿๆˆใ€ๅ›พๅƒ็ผ–่พ‘ใ€่”็ฝ‘ๆœ็ดขใ€่ง†้ข‘็”Ÿๆˆใ€ๆทฑๅบฆๆ€่€ƒ๏ผŒๆ”ฏๆŒๅทๆฑ ๅนถๅ‘่ฐƒ็”จๅ’Œ่‡ชๅŠจ่ดŸ่ฝฝๅ‡่กกใ€‚"
5
+ readme = "README.md"
6
+ requires-python = ">=3.13"
7
+ dependencies = [
8
+ "aiofiles==25.1.0",
9
+ "aiomysql==0.2.0",
10
+ "curl-cffi==0.13.0",
11
+ "fastapi==0.119.0",
12
+ "pydantic==2.12.2",
13
+ "python-dotenv==1.1.1",
14
+ "redis==6.4.0",
15
+ "requests==2.32.5",
16
+ "starlette==0.48.0",
17
+ "toml==0.10.2",
18
+ "uvloop==0.21.0 ; sys_platform != 'win32'",
19
+ "uvicorn==0.37.0",
20
+ "portalocker==3.0.0",
21
+ "fastmcp==2.12.4",
22
+ "cryptography==46.0.3",
23
+ "orjson==3.11.4",
24
+ "aiohttp==3.13.2",
25
+ ]
readme.md ADDED
@@ -0,0 +1,254 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: Grok2api
3
+ emoji: ๐Ÿฆ€
4
+ colorFrom: indigo
5
+ colorTo: red
6
+ sdk: docker
7
+ pinned: false
8
+ ---
9
+ # Grok2API
10
+
11
+ ๅŸบไบŽ **FastAPI** ้‡ๆž„็š„ Grok2API๏ผŒๅ…จ้ข้€‚้…ๆœ€ๆ–ฐ Web ่ฐƒ็”จๆ ผๅผ๏ผŒๆ”ฏๆŒๆตๅผๅฏน่ฏใ€ๅ›พๅƒ็”Ÿๆˆใ€ๅ›พๅƒ็ผ–่พ‘ใ€่”็ฝ‘ๆœ็ดขใ€ๆทฑๅบฆๆ€่€ƒ๏ผŒๅทๆฑ ๅนถๅ‘ไธŽ่‡ชๅŠจ่ดŸ่ฝฝๅ‡่กกไธ€ไฝ“ๅŒ–ใ€‚
12
+
13
+ ## ๐Ÿ†• Fork ๅขžๅผบๅŠŸ่ƒฝ
14
+
15
+ ๆœฌ Fork ๅœจๅŽŸ็‰ˆๅŸบ็ก€ไธŠๆ–ฐๅขžไปฅไธ‹ๅŠŸ่ƒฝ๏ผš
16
+
17
+ - **ๅคš Key ็ฎก็†ไธŽๆŒไน…ๅŒ–**๏ผšๆ”ฏๆŒ็ฎก็†ๅ‘˜ๆ‰น้‡ๅˆ›ๅปบใ€ๅค‡ๆณจใ€ๅˆ ้™ค API Key๏ผŒๆ”ฏๆŒๅคš้€‰ๆ‰น้‡ๆ“ไฝœใ€‚ๆ‰€ๆœ‰ๅฏ†้’ฅๅ‡ๅฎž็ŽฐๆŒไน…ๅŒ–ๅญ˜ๅ‚จ๏ผŒ้‡ๅฏไธไธขๅคฑใ€‚
18
+ - **ๆ—ฅๅฟ—ๅฎก่ฎกๆ”ฏๆŒ**๏ผšๅฎžๆ—ถ่ฎฐๅฝ•่ฏทๆฑ‚็ป†่Š‚๏ผŒไธ”ๆ—ฅๅฟ—ๆ•ฐๆฎๆ”ฏๆŒๆ–‡ไปถๆŒไน…ๅŒ–ๅญ˜ๅ‚จใ€‚
19
+ - **ๅนถๅ‘ๆ€ง่ƒฝไผ˜ๅŒ– (Critical)**๏ผš้‡ๆž„ไบ†ๅบ•ๅฑ‚็š„ Grok ่ฏทๆฑ‚ๅ’Œๅ“ๅบ”ๅค„็†้€ป่พ‘ใ€‚้‡‡็”จๅ…จๅผ‚ๆญฅๆตๅผ่ฟญไปฃ (`aiter_lines`)๏ผŒๅฝปๅบ•่งฃๅ†ณไบ†ๅœจๆถˆๆฏ็”Ÿๆˆ่ฟ‡็จ‹ไธญๅŽๅฐ็ฎก็†้ขๆฟโ€œๅกๆญปโ€ๆˆ–ๅ“ๅบ”็ผ“ๆ…ข็š„้—ฎ้ข˜ใ€‚
20
+ - **Token ๆ™บ่ƒฝๅ†ทๅด**๏ผš่ฏทๆฑ‚ๅคฑ่ดฅๅŽ่‡ชๅŠจๅ†ทๅด๏ผŒ้ฟๅ…่ฟž็ปญไฝฟ็”จๆ•…้šœ Token
21
+ - ๆ™ฎ้€š้”™่ฏฏ๏ผšๅ†ทๅด 5 ๆฌก่ฏทๆฑ‚
22
+ - 429 ้™ๆต + ๆœ‰้ขๅบฆ๏ผšๅ†ทๅด 1 ๅฐๆ—ถ
23
+ - 429 ้™ๆต + ๆ— ้ขๅบฆ๏ผšๅ†ทๅด 10 ๅฐๆ—ถ
24
+ - **ไธ€้”ฎๅˆทๆ–ฐๆ‰€ๆœ‰ Token**๏ผšๅŽๅฐๆŒ‰้’ฎๆ‰น้‡ๅˆทๆ–ฐๅ‰ฉไฝ™ๆฌกๆ•ฐ๏ผŒๅธฆๅฎžๆ—ถ่ฟ›ๅบฆๆ˜พ็คบ
25
+ - **ๅนถๅ‘ไฟๆŠค**๏ผšๅˆทๆ–ฐไปปๅŠก่ฟ›่กŒไธญ่‡ชๅŠจๆ‹’็ป้‡ๅค่ฏทๆฑ‚
26
+ - **่ฏทๆฑ‚็ปŸ่ฎกไธŽๆŒไน…ๅŒ–**๏ผšๆŒ‰ๅฐๆ—ถ/ๅคฉ็ปŸ่ฎก่ฏทๆฑ‚่ถ‹ๅŠฟ๏ผŒๅŒ…ๅซๆˆๅŠŸ็އๅ’Œๆจกๅž‹ๅˆ†ๅธƒๅ›พ่กจ๏ผŒ็ปŸ่ฎกๆ•ฐๆฎๆ”ฏๆŒๆŒไน…ๅŒ–ใ€‚
27
+ - **็ผ“ๅญ˜้ข„่งˆ**๏ผšๅŽๅฐๆทปๅŠ ็ผ“ๅญ˜้ข„่งˆๆฟๅ—๏ผŒๅฏๆŸฅ็œ‹็ผ“ๅญ˜็š„ๅ›พ็‰‡/่ง†้ข‘
28
+ <br>
29
+
30
+ ## ไฝฟ็”จ่ฏดๆ˜Ž
31
+
32
+ ### ่ฐƒ็”จๆฌกๆ•ฐไธŽ้…้ข
33
+
34
+ - **ๆ™ฎ้€š่ดฆๅท๏ผˆBasic๏ผ‰**๏ผšๅ…่ดนไฝฟ็”จ **80 ๆฌก / 20 ๅฐๆ—ถ**
35
+ - **Super ่ดฆๅท**๏ผš้…้ขๅพ…ๅฎš๏ผˆไฝœ่€…ๆœชๆต‹๏ผ‰
36
+ - ็ณป็ปŸ่‡ชๅŠจ่ดŸ่ฝฝๅ‡่กกๅ„่ดฆๅท่ฐƒ็”จๆฌกๆ•ฐ๏ผŒๅฏๅœจ**็ฎก็†้กต้ข**ๅฎžๆ—ถๆŸฅ็œ‹็”จ้‡ไธŽ็Šถๆ€
37
+
38
+ ### ๅ›พๅƒ็”ŸๆˆๅŠŸ่ƒฝ
39
+
40
+ - ๅœจๅฏน่ฏๅ†…ๅฎนไธญ่พ“ๅ…ฅๅฆ‚โ€œ็ป™ๆˆ‘็”ปไธ€ไธชๆœˆไบฎโ€่‡ชๅŠจ่งฆๅ‘ๅ›พ็‰‡็”Ÿๆˆ
41
+ - ๆฏๆฌกไปฅ **Markdown ๆ ผๅผ่ฟ”ๅ›žไธคๅผ ๅ›พ็‰‡**๏ผŒๅ…ฑๆถˆ่€— 4 ๆฌก้ขๅบฆ
42
+ - **ๆณจๆ„๏ผšGrok ็š„ๅ›พ็‰‡็›ด้“พๅ— 403 ้™ๅˆถ๏ผŒ็ณป็ปŸ่‡ชๅŠจ็ผ“ๅญ˜ๅ›พ็‰‡ๅˆฐๆœฌๅœฐใ€‚ๅฟ…้กปๆญฃ็กฎ่ฎพ็ฝฎ `Base Url` ไปฅ็กฎไฟๅ›พ็‰‡่ƒฝๆญฃๅธธๆ˜พ็คบ๏ผ**
43
+
44
+ ### ่ง†้ข‘็”ŸๆˆๅŠŸ่ƒฝ
45
+ - ้€‰ๆ‹ฉ `grok-imagine-0.9` ๆจกๅž‹๏ผŒไผ ๅ…ฅๅ›พ็‰‡ๅ’Œๆ็คบ่ฏๅณๅฏ๏ผˆๆ–นๅผๅ’Œ OpenAI ็š„ๅ›พ็‰‡ๅˆ†ๆž่ฐƒ็”จๆ ผๅผไธ€่‡ด๏ผ‰
46
+ - ่ฟ”ๅ›žๆ ผๅผไธบ `<video src="{full_video_url}" controls="controls"></video>`
47
+ - **ๆณจๆ„๏ผšGrok ็š„่ง†้ข‘็›ด้“พๅ— 403 ้™ๅˆถ๏ผŒ็ณป็ปŸ่‡ชๅŠจ็ผ“ๅญ˜ๅ›พ็‰‡ๅˆฐๆœฌๅœฐใ€‚ๅฟ…้กปๆญฃ็กฎ่ฎพ็ฝฎ `Base Url` ไปฅ็กฎไฟ่ง†้ข‘่ƒฝๆญฃๅธธๆ˜พ็คบ๏ผ**
48
+
49
+ ```
50
+ curl https://ไฝ ็š„ๆœๅŠกๅ™จๅœฐๅ€/v1/chat/completions \
51
+ -H "Content-Type: application/json" \
52
+ -H "Authorization: Bearer $GROK2API_API_KEY" \
53
+ -d '{
54
+ "model": "grok-imagine-0.9",
55
+ "messages": [
56
+ {
57
+ "role": "user",
58
+ "content": [
59
+ {
60
+ "type": "text",
61
+ "text": "่ฎฉๅคช้˜ณๅ‡่ตทๆฅ"
62
+ },
63
+ {
64
+ "type": "image_url",
65
+ "image_url": {
66
+ "url": "https://your-image.jpg"
67
+ }
68
+ }
69
+ ]
70
+ }
71
+ ]
72
+ }'
73
+ ```
74
+
75
+ ### ๅ…ณไบŽ `x_statsig_id`
76
+
77
+ - `x_statsig_id` ๆ˜ฏ Grok ็”จไบŽๅๆœบๅ™จไบบ็š„ Token๏ผŒๆœ‰้€†ๅ‘่ต„ๆ–™ๅฏๅ‚่€ƒ
78
+ - **ๅปบ่ฎฎๆ–ฐๆ‰‹ๅ‹ฟไฟฎๆ”น้…็ฝฎ๏ผŒไฟ็•™้ป˜่ฎคๅ€ผๅณๅฏ**
79
+ - ๅฐ่ฏ•็”จ Camoufox ็ป•่ฟ‡ 403 ่‡ชๅŠจ่Žท id๏ผŒไฝ† grok ็Žฐๅทฒ้™ๅˆถ้ž็™ป้™†็š„`x_statsig_id`๏ผŒๆ•…ๅผƒ็”จ๏ผŒ้‡‡็”จๅ›บๅฎšๅ€ผไปฅๅ…ผๅฎนๆ‰€ๆœ‰่ฏทๆฑ‚
80
+
81
+ <br>
82
+
83
+ ## ๅฆ‚ไฝ•้ƒจ็ฝฒ
84
+
85
+ ### ๆ–นๅผไธ€๏ผšDocker Compose๏ผˆๆŽจ่๏ผ‰
86
+
87
+ ็”ฑไบŽๆœฌ้กน็›ฎๅŒ…ๅซไฟฎๆ”น๏ผŒๅปบ่ฎฎ็›ดๆŽฅๆž„ๅปบ่ฟ่กŒ๏ผš
88
+
89
+ 1. ๅ…‹้š†ๆœฌไป“ๅบ“
90
+ ```bash
91
+ git clone https://github.com/Tomiya233/grok2api.git
92
+ cd grok2api
93
+ ```
94
+
95
+ 2. ๅฏๅŠจๆœๅŠก
96
+ ```bash
97
+ docker-compose up -d --build
98
+ ```
99
+
100
+ **docker-compose.yml ๅ‚่€ƒ๏ผš**
101
+ ```yaml
102
+ services:
103
+ grok2api:
104
+ build: .
105
+ image: grok2api:latest
106
+ container_name: grok2api
107
+ restart: always
108
+ ports:
109
+ - "8000:8000"
110
+ volumes:
111
+ - grok_data:/app/data
112
+ - ./logs:/app/logs
113
+ environment:
114
+ - LOG_LEVEL=INFO
115
+ logging:
116
+ driver: "json-file"
117
+ options:
118
+ max-size: "10m"
119
+ max-file: "3"
120
+
121
+ volumes:
122
+ grok_data:
123
+ ```
124
+
125
+ ### ๆ–นๅผไบŒ๏ผšPython ็›ดๆŽฅ่ฟ่กŒ
126
+
127
+ **ๅ‰็ฝฎ่ฆๆฑ‚**๏ผšPython 3.10+๏ผŒๅปบ่ฎฎไฝฟ็”จ `uv` ๅŒ…็ฎก็†ๅ™จ
128
+
129
+ 1. ๅฎ‰่ฃ… uv
130
+ ```bash
131
+ curl -LsSf https://astral.sh/uv/install.sh | sh
132
+ ```
133
+
134
+ 2. ่ฟ่กŒๆœๅŠก
135
+ ```bash
136
+ # ๅฎ‰่ฃ…ไพ่ต–ๅนถ่ฟ่กŒ
137
+ uv sync
138
+ uv run python main.py
139
+ ```
140
+
141
+ ๆœๅŠก้ป˜่ฎค่ฟ่กŒๅœจ `http://127.0.0.1:8000`
142
+
143
+ ### ็Žฏๅขƒๅ˜้‡่ฏดๆ˜Ž
144
+
145
+ | ็Žฏๅขƒๅ˜้‡ | ๅฟ…ๅกซ | ่ฏดๆ˜Ž | ็คบไพ‹ |
146
+ |---------------|------|-----------------------------------------|------|
147
+ | STORAGE_MODE | ๅฆ | ๅญ˜ๅ‚จๆจกๅผ๏ผšfile/mysql/redis | file |
148
+ | DATABASE_URL | ๅฆ | ๆ•ฐๆฎๅบ“่ฟžๆŽฅURL๏ผˆMySQL/Redisๆจกๅผๆ—ถๅฟ…้œ€๏ผ‰ | mysql://user:pass@host:3306/db |
149
+
150
+ **ๅญ˜ๅ‚จๆจกๅผ๏ผš**
151
+ - `file`: ๆœฌๅœฐๆ–‡ไปถๅญ˜ๅ‚จ๏ผˆ้ป˜่ฎค๏ผ‰
152
+ - `mysql`: MySQLๆ•ฐๆฎๅบ“ๅญ˜ๅ‚จ๏ผŒ้œ€่ฎพ็ฝฎDATABASE_URL
153
+ - `redis`: Redis็ผ“ๅญ˜ๅญ˜ๅ‚จ๏ผŒ้œ€่ฎพ็ฝฎDATABASE_URL
154
+
155
+ <br>
156
+
157
+ ## ๆŽฅๅฃ่ฏดๆ˜Ž
158
+
159
+ > ไธŽ OpenAI ๅฎ˜ๆ–นๆŽฅๅฃๅฎŒๅ…จๅ…ผๅฎน๏ผŒAPI ่ฏทๆฑ‚้œ€้€š่ฟ‡ **Authorization header** ่ฎค่ฏ
160
+
161
+ | ๆ–นๆณ• | ็ซฏ็‚น | ๆ่ฟฐ | ๆ˜ฏๅฆ้œ€่ฆ่ฎค่ฏ |
162
+ |-------|------------------------------|------------------------------------|------|
163
+ | POST | `/v1/chat/completions` | ๅˆ›ๅปบ่Šๅคฉๅฏน่ฏ๏ผˆๆตๅผ/้žๆตๅผ๏ผ‰ | โœ… |
164
+ | GET | `/v1/models` | ่Žทๅ–ๅ…จ้ƒจๆ”ฏๆŒๆจกๅž‹ | โœ… |
165
+ | GET | `/images/{img_path}` | ่Žทๅ–็”Ÿๆˆๅ›พ็‰‡ๆ–‡ไปถ | โŒ |
166
+
167
+ <br>
168
+
169
+ <details>
170
+ <summary>็ฎก็†ไธŽ็ปŸ่ฎกๆŽฅๅฃ๏ผˆๅฑ•ๅผ€ๆŸฅ็œ‹ๆ›ดๅคš๏ผ‰</summary>
171
+
172
+ | ๆ–นๆณ• | ็ซฏ็‚น | ๆ่ฟฐ | ่ฎค่ฏ |
173
+ |-------|-------------------------|--------------------|------|
174
+ | GET | /login | ็ฎก็†ๅ‘˜็™ปๅฝ•้กต้ข | โŒ |
175
+ | GET | /manage | ็ฎก็†ๆŽงๅˆถๅฐ้กต้ข | โŒ |
176
+ | POST | /api/login | ็ฎก็†ๅ‘˜็™ปๅฝ•่ฎค่ฏ | โŒ |
177
+ | POST | /api/logout | ็ฎก็†ๅ‘˜็™ปๅ‡บ | โœ… |
178
+ | GET | /api/tokens | ่Žทๅ– Token ๅˆ—่กจ | โœ… |
179
+ | POST | /api/tokens/add | ๆ‰น้‡ๆทปๅŠ  Token | โœ… |
180
+ | POST | /api/tokens/delete | ๆ‰น้‡ๅˆ ้™ค Token | โœ… |
181
+ | GET | /api/settings | ่Žทๅ–็ณป็ปŸ้…็ฝฎ | โœ… |
182
+ | POST | /api/settings | ๆ›ดๆ–ฐ็ณป็ปŸ้…็ฝฎ | โœ… |
183
+ | GET | /api/cache/size | ่Žทๅ–็ผ“ๅญ˜ๅคงๅฐ | โœ… |
184
+ | POST | /api/cache/clear | ๆธ…็†ๆ‰€ๆœ‰็ผ“ๅญ˜ | โœ… |
185
+ | POST | /api/cache/clear/images | ๆธ…็†ๅ›พ็‰‡็ผ“ๅญ˜ | โœ… |
186
+ | POST | /api/cache/clear/videos | ๆธ…็†่ง†้ข‘็ผ“ๅญ˜ | โœ… |
187
+ | GET | /api/stats | ่Žทๅ–็ปŸ่ฎกไฟกๆฏ | โœ… |
188
+ | POST | /api/tokens/tags | ๆ›ดๆ–ฐ Token ๆ ‡็ญพ | โœ… |
189
+ | POST | /api/tokens/note | ๆ›ดๆ–ฐ Token ๅค‡ๆณจ | โœ… |
190
+ | POST | /api/tokens/test | ๆต‹่ฏ• Token ๅฏ็”จๆ€ง | โœ… |
191
+ | GET | /api/tokens/tags/all | ่Žทๅ–ๆ‰€ๆœ‰ๆ ‡็ญพๅˆ—่กจ | โœ… |
192
+ | GET | /api/storage/mode | ่Žทๅ–ๅญ˜ๅ‚จๆจกๅผไฟกๆฏ | โœ… |
193
+ | POST | /api/tokens/refresh-all | ไธ€้”ฎๅˆทๆ–ฐๆ‰€ๆœ‰Token | โœ… |
194
+ | GET | /api/tokens/refresh-progress | ่Žทๅ–ๅˆทๆ–ฐ่ฟ›ๅบฆ | โœ… |
195
+ | GET | /api/keys | ่Žทๅ– API Key ๅˆ—่กจ | โœ… |
196
+ | POST | /api/keys/add | ๅˆ›ๅปบๆ–ฐ API Key | โœ… |
197
+ | POST | /api/keys/delete | ๅˆ ้™ค API Key | โœ… |
198
+ | POST | /api/keys/status | ๅˆ‡ๆข Key ๅฏ็”จ็Šถๆ€ | โœ… |
199
+ | POST | /api/keys/name | ไฟฎๆ”น Key ๅค‡ๆณจๅ็งฐ | โœ… |
200
+ | GET | /api/logs | ่Žทๅ–่ฏทๆฑ‚ๆ—ฅๅฟ—(1000ๆก)| โœ… |
201
+ | POST | /api/logs/clear | ๆธ…็ฉบๆ‰€ๆœ‰ๅฎก่ฎกๆ—ฅๅฟ— | โœ… |
202
+
203
+ </details>
204
+
205
+ <br>
206
+
207
+ ## ๅฏ็”จๆจกๅž‹ไธ€่งˆ
208
+
209
+ | ๆจกๅž‹ๅ็งฐ | ่ฎกๆฌก | ่ดฆๆˆท็ฑปๅž‹ | ๅ›พๅƒ็”Ÿๆˆ/็ผ–่พ‘ | ๆทฑๅบฆๆ€่€ƒ | ่”็ฝ‘ๆœ็ดข | ่ง†้ข‘็”Ÿๆˆ |
210
+ |------------------------|--------|--------------|--------------|----------|----------|----------|
211
+ | `grok-4.1` | 1 | Basic/Super | โœ… | โœ… | โœ… | โŒ |
212
+ | `grok-4.1-thinking` | 1 | Basic/Super | โœ… | โœ… | โœ… | โŒ |
213
+ | `grok-imagine-0.9` | - | Basic/Super | โœ… | โŒ | โŒ | โœ… |
214
+ | `grok-4-fast` | 1 | Basic/Super | โœ… | โœ… | โœ… | โŒ |
215
+ | `grok-4-fast-expert` | 4 | Basic/Super | โœ… | โœ… | โœ… | โŒ |
216
+ | `grok-4-expert` | 4 | Basic/Super | โœ… | โœ… | โœ… | โŒ |
217
+ | `grok-4-heavy` | 1 | Super | โœ… | โœ… | โœ… | โŒ |
218
+ | `grok-3-fast` | 1 | Basic/Super | โœ… | โŒ | โœ… | โŒ |
219
+
220
+ <br>
221
+
222
+ ## ้…็ฝฎๅ‚ๆ•ฐ่ฏดๆ˜Ž
223
+
224
+ > ๆœๅŠกๅฏๅŠจๅŽ๏ผŒ็™ปๅฝ• `/login` ็ฎก็†ๅŽๅฐ่ฟ›่กŒๅ‚ๆ•ฐ้…็ฝฎ
225
+
226
+ | ๅ‚ๆ•ฐๅ | ไฝœ็”จๅŸŸ | ๅฟ…ๅกซ | ่ฏดๆ˜Ž | ้ป˜่ฎคๅ€ผ |
227
+ |----------------------------|---------|------|-----------------------------------------|--------|
228
+ | admin_username | global | ๅฆ | ็ฎก็†ๅŽๅฐ็™ปๅฝ•็”จๆˆทๅ | "admin"|
229
+ | admin_password | global | ๅฆ | ็ฎก็†ๅŽๅฐ็™ปๅฝ•ๅฏ†็  | "admin"|
230
+ | log_level | global | ๅฆ | ๆ—ฅๅฟ—็บงๅˆซ๏ผšDEBUG/INFO/... | "INFO" |
231
+ | image_mode | global | ๅฆ | ๅ›พ็‰‡่ฟ”ๅ›žๆจกๅผ๏ผšurl/base64 | "url" |
232
+ | image_cache_max_size_mb | global | ๅฆ | ๅ›พ็‰‡็ผ“ๅญ˜ๆœ€ๅคงๅฎน้‡(MB) | 512 |
233
+ | video_cache_max_size_mb | global | ๅฆ | ่ง†้ข‘็ผ“ๅญ˜ๆœ€ๅคงๅฎน้‡(MB) | 1024 |
234
+ | base_url | global | ๅฆ | ๆœๅŠกๅŸบ็ก€URL/ๅ›พ็‰‡่ฎฟ้—ฎๅŸบๅ‡† | "" |
235
+ | api_key | grok | ๅฆ | API ๅฏ†้’ฅ๏ผˆๅฏ้€‰ๅŠ ๅผบๅฎ‰ๅ…จ๏ผ‰ | "" |
236
+ | proxy_url | grok | ๅฆ | HTTPไปฃ็†ๆœๅŠกๅ™จๅœฐๅ€ | "" |
237
+ | stream_chunk_timeout | grok | ๅฆ | ๆตๅผๅˆ†ๅ—่ถ…ๆ—ถๆ—ถ้—ด(็ง’) | 120 |
238
+ | stream_first_response_timeout | grok | ๅฆ | ๆตๅผ้ฆ–ๆฌกๅ“ๅบ”่ถ…ๆ—ถๆ—ถ้—ด(็ง’) | 30 |
239
+ | stream_total_timeout | grok | ๅฆ | ๆตๅผๆ€ป่ถ…ๆ—ถๆ—ถ้—ด(็ง’) | 600 |
240
+ | cf_clearance | grok | ๅฆ | Cloudflareๅฎ‰ๅ…จไปค็‰Œ | "" |
241
+ | x_statsig_id | grok | ๆ˜ฏ | ๅๆœบๅ™จไบบๅ”ฏไธ€ๆ ‡่ฏ†็ฌฆ | "ZTpUeXBlRXJyb3I6IENhbm5vdCByZWFkIHByb3BlcnRpZXMgb2YgdW5kZWZpbmVkIChyZWFkaW5nICdjaGlsZE5vZGVzJyk=" |
242
+ | filtered_tags | grok | ๅฆ | ่ฟ‡ๆปคๅ“ๅบ”ๆ ‡็ญพ๏ผˆ้€—ๅทๅˆ†้š”๏ผ‰ | "xaiartifact,xai:tool_usage_card,grok:render" |
243
+ | show_thinking | grok | ๅฆ | ๆ˜พ็คบๆ€่€ƒ่ฟ‡็จ‹ true(ๆ˜พ็คบ)/false(้š่—) | true |
244
+ | temporary | grok | ๅฆ | ไผš่ฏๆจกๅผ true(ไธดๆ—ถ)/false | true |
245
+
246
+ <br>
247
+
248
+ ## โš ๏ธ ๆณจๆ„ไบ‹้กน
249
+
250
+ ๆœฌ้กน็›ฎไป…ไพ›ๅญฆไน ไธŽ็ ”็ฉถ๏ผŒ่ฏท้ตๅฎˆ็›ธๅ…ณไฝฟ็”จๆกๆฌพ๏ผ
251
+
252
+ <br>
253
+
254
+ > ๆœฌ้กน็›ฎๅŸบไบŽไปฅไธ‹้กน็›ฎๅญฆไน ้‡ๆž„๏ผŒ็‰นๅˆซๆ„Ÿ่ฐข๏ผš[LINUX DO](https://linux.do)ใ€[VeroFess/grok2api](https://github.com/VeroFess/grok2api)ใ€[xLmiler/grok2api_python](https://github.com/xLmiler/grok2api_python)
requirements.txt ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ toml==0.10.2
2
+ fastapi==0.119.0
3
+ uvicorn==0.37.0
4
+ uvloop==0.21.0; sys_platform != 'win32'
5
+ python-dotenv==1.1.1
6
+ curl_cffi==0.13.0
7
+ requests==2.32.5
8
+ starlette==0.48.0
9
+ pydantic==2.12.2
10
+ aiofiles==25.1.0
11
+ portalocker==3.0.0
12
+ aiomysql==0.2.0
13
+ redis==6.4.0
14
+ fastmcp==2.12.4
15
+ cryptography==46.0.3
16
+ orjson==3.11.4
17
+ aiohttp==3.13.2
test/test_concurrency.py ADDED
@@ -0,0 +1,276 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ Grok2API ๅนถๅ‘ๆ€ง่ƒฝๆต‹่ฏ•่„šๆœฌ
4
+
5
+ ๆต‹่ฏ•ไธๅŒๅนถๅ‘็บงๅˆซไธ‹็š„APIๆ€ง่ƒฝ่กจ็Žฐ
6
+ """
7
+
8
+ import asyncio
9
+ import aiohttp
10
+ import time
11
+ import statistics
12
+ import argparse
13
+ from datetime import datetime
14
+ from typing import List, Dict, Any
15
+ import json
16
+
17
+
18
class ConcurrencyTester:
    """ๅนถๅ‘ๆต‹่ฏ•ๅ™จ — benchmark driver for the Grok2API chat endpoint.

    ``run_concurrent_test`` fires ``total_requests`` POSTs at
    ``/v1/chat/completions`` while an ``asyncio.Semaphore`` caps the number
    of requests in flight, then prints latency and throughput statistics.
    """

    def __init__(self, base_url: str, api_key: str = None):
        # Base URL normalized without a trailing slash.
        self.base_url = base_url.rstrip('/')
        # Optional bearer token; omitted from headers when falsy.
        self.api_key = api_key
        # Per-request result dicts accumulated by run_concurrent_test().
        self.results: List[Dict[str, Any]] = []

    async def test_request(self, session: aiohttp.ClientSession, request_id: int) -> Dict[str, Any]:
        """Send one chat-completion request and return a result record.

        Never raises: timeouts and other failures are folded into the
        returned dict. ``status`` is one of "success"/"error"/"timeout"/
        "exception"; ``elapsed`` (seconds) is always present.
        """
        url = f"{self.base_url}/v1/chat/completions"

        headers = {
            "Content-Type": "application/json"
        }
        if self.api_key:
            headers["Authorization"] = f"Bearer {self.api_key}"

        payload = {
            "model": "grok-3-fast",
            "messages": [
                {"role": "user", "content": f"ๆต‹่ฏ•่ฏทๆฑ‚ #{request_id}๏ผŒ่ฏท็ฎ€็Ÿญๅ›žๅคOK"}
            ],
            "stream": False,
            "max_tokens": 10
        }

        start_time = time.time()

        try:
            async with session.post(url, json=payload, headers=headers, timeout=30) as response:
                status = response.status

                if status == 200:
                    data = await response.json()
                    # Elapsed includes body read, matching the error branch.
                    elapsed = time.time() - start_time
                    return {
                        "id": request_id,
                        "status": "success",
                        "http_status": status,
                        "elapsed": elapsed,
                        "response_length": len(json.dumps(data))
                    }
                else:
                    elapsed = time.time() - start_time
                    error_text = await response.text()
                    return {
                        "id": request_id,
                        "status": "error",
                        "http_status": status,
                        "elapsed": elapsed,
                        "error": error_text[:200]
                    }

        except asyncio.TimeoutError:
            return {
                "id": request_id,
                "status": "timeout",
                "elapsed": time.time() - start_time,
                "error": "Request timeout"
            }

        except Exception as e:
            return {
                "id": request_id,
                "status": "exception",
                "elapsed": time.time() - start_time,
                "error": str(e)
            }

    async def run_concurrent_test(self, concurrency: int, total_requests: int):
        """Run the benchmark with at most ``concurrency`` requests in flight.

        Fix vs. the previous implementation: ``asyncio.create_task`` starts
        a coroutine immediately, so batching tasks into groups of
        ``concurrency`` and gathering each group neither limited the number
        of in-flight requests nor overlapped the batches, and progress was
        only printed at batch boundaries. A semaphore now enforces the cap
        while keeping the pipeline full, and progress is reported per
        completed request.
        """
        print(f"\n{'='*60}")
        print(f"๐Ÿ“Š ๆต‹่ฏ•้…็ฝฎ๏ผšๅนถๅ‘ๆ•ฐ {concurrency}, ๆ€ป่ฏทๆฑ‚ๆ•ฐ {total_requests}")
        print(f"{'='*60}")

        connector = aiohttp.TCPConnector(limit=concurrency, limit_per_host=concurrency)
        timeout = aiohttp.ClientTimeout(total=60)
        semaphore = asyncio.Semaphore(concurrency)
        completed = 0

        async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session:

            async def limited_request(request_id: int) -> Dict[str, Any]:
                # Hold one semaphore slot for the duration of one request.
                nonlocal completed
                async with semaphore:
                    result = await self.test_request(session, request_id)
                completed += 1
                print(f"   ่ฟ›ๅบฆ: {completed}/{total_requests} ({completed/total_requests*100:.1f}%)", end='\r')
                return result

            # ้ข„็ƒญ (warm-up request, not counted in the statistics)
            print("๐Ÿ”ฅ ้ข„็ƒญไธญ...")
            await self.test_request(session, 0)

            print(f"๐Ÿš€ ๅผ€ๅง‹ๅนถๅ‘ๆต‹่ฏ•...")
            start_time = time.time()

            tasks = [
                asyncio.create_task(limited_request(i))
                for i in range(1, total_requests + 1)
            ]
            self.results.extend(await asyncio.gather(*tasks))

            total_time = time.time() - start_time

        self.print_statistics(concurrency, total_requests, total_time)

    def print_statistics(self, concurrency: int, total_requests: int, total_time: float):
        """Print aggregate success-rate, throughput and latency statistics."""
        success_results = [r for r in self.results if r["status"] == "success"]
        error_results = [r for r in self.results if r["status"] != "success"]

        success_count = len(success_results)
        error_count = len(error_results)

        if success_results:
            # Sort once; min/max/percentiles all index the sorted list.
            latencies = sorted(r["elapsed"] for r in success_results)
            avg_latency = statistics.mean(latencies)
            min_latency = latencies[0]
            max_latency = latencies[-1]
            p50_latency = statistics.median(latencies)
            p95_latency = latencies[int(len(latencies) * 0.95)] if len(latencies) > 1 else latencies[0]
            p99_latency = latencies[int(len(latencies) * 0.99)] if len(latencies) > 1 else latencies[0]
        else:
            avg_latency = min_latency = max_latency = p50_latency = p95_latency = p99_latency = 0

        throughput = total_requests / total_time if total_time > 0 else 0

        print(f"\n\n{'='*60}")
        print(f"๐Ÿ“ˆ ๆต‹่ฏ•็ป“ๆžœ็ปŸ่ฎก")
        print(f"{'='*60}")
        print(f"  ๆต‹่ฏ•ๆ—ถ้—ด: {total_time:.2f}s")
        print(f"  ๆ€ป่ฏทๆฑ‚ๆ•ฐ: {total_requests}")
        print(f"  ๅนถๅ‘ๆ•ฐ: {concurrency}")
        print(f"")
        print(f"  ๆˆๅŠŸ่ฏทๆฑ‚: {success_count} ({success_count/total_requests*100:.1f}%)")
        print(f"  ๅคฑ่ดฅ่ฏทๆฑ‚: {error_count} ({error_count/total_requests*100:.1f}%)")
        print(f"")
        print(f"  ๅžๅ้‡: {throughput:.2f} req/s")
        print(f"")
        print(f"  ๅปถ่ฟŸ็ปŸ่ฎก:")
        print(f"    ๆœ€ๅฐ: {min_latency*1000:.0f}ms")
        print(f"    ๅนณๅ‡: {avg_latency*1000:.0f}ms")
        print(f"    ๆœ€ๅคง: {max_latency*1000:.0f}ms")
        print(f"    P50: {p50_latency*1000:.0f}ms")
        print(f"    P95: {p95_latency*1000:.0f}ms")
        print(f"    P99: {p99_latency*1000:.0f}ms")

        # Error breakdown by failure kind.
        if error_results:
            print(f"\n  โš ๏ธ ้”™่ฏฏ่ฏฆๆƒ…:")
            error_types = {}
            for r in error_results:
                error_type = r.get("status", "unknown")
                error_types[error_type] = error_types.get(error_type, 0) + 1

            for error_type, count in error_types.items():
                print(f"    {error_type}: {count}")

        print(f"{'='*60}\n")

        self.print_performance_rating(throughput, avg_latency)

    def print_performance_rating(self, throughput: float, avg_latency: float):
        """Print a star rating for throughput and average latency."""
        print(f"๐ŸŽฏ ๆ€ง่ƒฝ่ฏ„็บง:")

        # Throughput rating thresholds (req/s).
        if throughput >= 100:
            rating = "โญโญโญโญโญ ไผ˜็ง€"
        elif throughput >= 60:
            rating = "โญโญโญโญ ่‰ฏๅฅฝ"
        elif throughput >= 30:
            rating = "โญโญโญ ไธญ็ญ‰"
        elif throughput >= 10:
            rating = "โญโญ ่พƒไฝŽ"
        else:
            rating = "โญ ้œ€ไผ˜ๅŒ–"

        print(f"  ๅžๅ้‡ ({throughput:.1f} req/s): {rating}")

        # Latency rating thresholds (seconds).
        if avg_latency < 0.5:
            rating = "โญโญโญโญโญ ไผ˜็ง€"
        elif avg_latency < 1.0:
            rating = "โญโญโญโญ ่‰ฏๅฅฝ"
        elif avg_latency < 2.0:
            rating = "โญโญโญ ไธญ็ญ‰"
        elif avg_latency < 5.0:
            rating = "โญโญ ่พƒ้ซ˜"
        else:
            rating = "โญ ้œ€ไผ˜ๅŒ–"

        print(f"  ๅนณๅ‡ๅปถ่ฟŸ ({avg_latency*1000:.0f}ms): {rating}")
        print()
224
+
225
+
226
async def main():
    """CLI entry point: parse options and run one or more benchmark passes."""
    parser = argparse.ArgumentParser(description='Grok2API ๅนถๅ‘ๆ€ง่ƒฝๆต‹่ฏ•')
    parser.add_argument('--url', default='http://localhost:8000', help='API ๅŸบ็ก€URL')
    parser.add_argument('--key', default='', help='API Key๏ผˆๅฏ้€‰๏ผ‰')
    parser.add_argument('-c', '--concurrency', type=int, default=10, help='ๅนถๅ‘ๆ•ฐ')
    parser.add_argument('-n', '--requests', type=int, default=50, help='ๆ€ป่ฏทๆฑ‚ๆ•ฐ')
    parser.add_argument('--multi-test', action='store_true', help='่ฟ่กŒๅคš็บงๅนถๅ‘ๆต‹่ฏ•')
    args = parser.parse_args()

    print(f"""
    โ•”โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•—
    โ•‘        Grok2API ๅนถๅ‘ๆ€ง่ƒฝๆต‹่ฏ•ๅทฅๅ…ท              โ•‘
    โ•šโ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•

    ๐Ÿ”— ๆต‹่ฏ•็›ฎๆ ‡: {args.url}
    ๐Ÿ”‘ API Key: {'ๅทฒ่ฎพ็ฝฎ' if args.key else 'ๆœช่ฎพ็ฝฎ'}
    โฐ ๅผ€ๅง‹ๆ—ถ้—ด: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}
    """)

    bench = ConcurrencyTester(args.url, args.key)

    if not args.multi_test:
        # Single pass with the requested concurrency/request count.
        await bench.run_concurrent_test(args.concurrency, args.requests)
    else:
        # Escalating ladder: (concurrency, total requests) per pass.
        ladder = [
            (5, 20),
            (10, 50),
            (20, 100),
            (50, 200),
        ]
        for level, request_count in ladder:
            bench.results = []  # reset between passes so stats don't mix
            await bench.run_concurrent_test(level, request_count)
            await asyncio.sleep(2)  # brief cool-down between passes

    print(f"\nโœ… ๆต‹่ฏ•ๅฎŒๆˆ๏ผ")
    print(f"โฐ ็ป“ๆŸๆ—ถ้—ด: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
268
+
269
+
270
if __name__ == "__main__":
    # Run the async entry point; report interruption/failure without a traceback.
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print("\n\nโš ๏ธ ๆต‹่ฏ•่ขซ็”จๆˆทไธญๆ–ญ")
    except Exception as exc:
        print(f"\n\nโŒ ๆต‹่ฏ•ๅคฑ่ดฅ: {exc}")
test/test_concurrency.sh ADDED
@@ -0,0 +1,177 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/bin/bash
2
+
3
+ # Grok2API ๅนถๅ‘ๆต‹่ฏ•่„šๆœฌ๏ผˆShell็‰ˆๆœฌ๏ผ‰
4
+ # ไฝฟ็”จ curl ๅ’Œ GNU parallel ่ฟ›่กŒๅนถๅ‘ๆต‹่ฏ•
5
+
6
+ set -e
7
+
8
+ # ้…็ฝฎ
9
+ BASE_URL="${BASE_URL:-http://localhost:8000}"
10
+ API_KEY="${API_KEY:-}"
11
+ CONCURRENCY="${CONCURRENCY:-10}"
12
+ TOTAL_REQUESTS="${TOTAL_REQUESTS:-50}"
13
+
14
+ # ้ขœ่‰ฒ
15
+ RED='\033[0;31m'
16
+ GREEN='\033[0;32m'
17
+ YELLOW='\033[1;33m'
18
+ BLUE='\033[0;34m'
19
+ NC='\033[0m' # No Color
20
+
21
+ echo -e "${BLUE}โ•”โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•—${NC}"
22
+ echo -e "${BLUE}โ•‘ Grok2API ๅนถๅ‘ๆ€ง่ƒฝๆต‹่ฏ•ๅทฅๅ…ท (Shell็‰ˆ) โ•‘${NC}"
23
+ echo -e "${BLUE}โ•šโ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•${NC}"
24
+ echo ""
25
+ echo -e "${GREEN}๐Ÿ”— ๆต‹่ฏ•็›ฎๆ ‡:${NC} $BASE_URL"
26
+ echo -e "${GREEN}๐Ÿ”‘ API Key:${NC} ${API_KEY:-(ๆœช่ฎพ็ฝฎ)}"
27
+ echo -e "${GREEN}๐Ÿ“Š ๅนถๅ‘ๆ•ฐ:${NC} $CONCURRENCY"
28
+ echo -e "${GREEN}๐Ÿ“ˆ ๆ€ป่ฏทๆฑ‚ๆ•ฐ:${NC} $TOTAL_REQUESTS"
29
+ echo ""
30
+
31
+ # ๆฃ€ๆŸฅไพ่ต–
32
+ if ! command -v curl &> /dev/null; then
33
+ echo -e "${RED}โŒ ้”™่ฏฏ: ้œ€่ฆๅฎ‰่ฃ… curl${NC}"
34
+ exit 1
35
+ fi
36
+
37
+ # ๅˆ›ๅปบไธดๆ—ถ็›ฎๅฝ•
38
+ TMP_DIR=$(mktemp -d)
39
+ trap "rm -rf $TMP_DIR" EXIT
40
+
41
+ # ๅ•ไธช่ฏทๆฑ‚ๅ‡ฝๆ•ฐ
42
+ test_request() {
43
+ local request_id=$1
44
+ local start_time=$(date +%s.%N)
45
+
46
+ # ๆž„ๅปบ่ฏทๆฑ‚
47
+ local headers="Content-Type: application/json"
48
+ if [ -n "$API_KEY" ]; then
49
+ headers="${headers}\nAuthorization: Bearer ${API_KEY}"
50
+ fi
51
+
52
+ local response=$(curl -s -w "\n%{http_code}\n%{time_total}" \
53
+ -X POST "${BASE_URL}/v1/chat/completions" \
54
+ -H "Content-Type: application/json" \
55
+ ${API_KEY:+-H "Authorization: Bearer $API_KEY"} \
56
+ -d "{
57
+ \"model\": \"grok-3-fast\",
58
+ \"messages\": [{\"role\": \"user\", \"content\": \"ๆต‹่ฏ•่ฏทๆฑ‚ #${request_id}๏ผŒ่ฏท็ฎ€็Ÿญๅ›žๅคOK\"}],
59
+ \"stream\": false,
60
+ \"max_tokens\": 10
61
+ }" 2>&1)
62
+
63
+ local http_code=$(echo "$response" | tail -n 2 | head -n 1)
64
+ local time_total=$(echo "$response" | tail -n 1)
65
+
66
+ # ่ฎฐๅฝ•็ป“ๆžœ
67
+ echo "${request_id},${http_code},${time_total}" >> "$TMP_DIR/results.csv"
68
+
69
+ # ๆ˜พ็คบ่ฟ›ๅบฆ
70
+ echo -ne "\r ่ฟ›ๅบฆ: ${request_id}/${TOTAL_REQUESTS}"
71
+ }
72
+
73
# Export the worker function and its environment for GNU parallel.
export -f test_request
# Fix: test_request reads TOTAL_REQUESTS for its progress display, so it
# must be exported too — it was missing, leaving the counter blank in the
# child shells spawned by `parallel`.
export BASE_URL API_KEY TMP_DIR TOTAL_REQUESTS
76
+
77
+ # ๆธ…็ฉบ็ป“ๆžœๆ–‡ไปถ
78
+ echo "id,status,time" > "$TMP_DIR/results.csv"
79
+
80
+ echo -e "${YELLOW}๐Ÿš€ ๅผ€ๅง‹ๅนถๅ‘ๆต‹่ฏ•...${NC}"
81
+ START_TIME=$(date +%s.%N)
82
+
83
+ # ไฝฟ็”จ GNU parallel๏ผˆๅฆ‚ๆžœๅฏ็”จ๏ผ‰๏ผŒๅฆๅˆ™ไฝฟ็”จ็ฎ€ๅ•ๅพช็Žฏ
84
+ if command -v parallel &> /dev/null; then
85
+ seq 1 $TOTAL_REQUESTS | parallel -j $CONCURRENCY test_request {}
86
+ else
87
+ # ็ฎ€ๅ•็š„ๅŽๅฐไปปๅŠกๅนถๅ‘
88
+ for i in $(seq 1 $TOTAL_REQUESTS); do
89
+ test_request $i &
90
+
91
+ # ๆŽงๅˆถๅนถๅ‘ๆ•ฐ
92
+ if (( i % CONCURRENCY == 0 )); then
93
+ wait
94
+ fi
95
+ done
96
+ wait
97
+ fi
98
+
99
+ END_TIME=$(date +%s.%N)
100
+ TOTAL_TIME=$(echo "$END_TIME - $START_TIME" | bc)
101
+
102
+ echo -e "\n"
103
+
104
+ # ็ปŸ่ฎก็ป“ๆžœ
105
+ echo -e "${BLUE}โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•${NC}"
106
+ echo -e "${BLUE}๐Ÿ“ˆ ๆต‹่ฏ•็ป“ๆžœ็ปŸ่ฎก${NC}"
107
+ echo -e "${BLUE}โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•${NC}"
108
+
109
+ # ็ปŸ่ฎกๆˆๅŠŸ/ๅคฑ่ดฅ
110
+ SUCCESS_COUNT=$(awk -F',' '$2 == 200 {count++} END {print count+0}' "$TMP_DIR/results.csv")
111
+ ERROR_COUNT=$((TOTAL_REQUESTS - SUCCESS_COUNT))
112
+
113
+ echo -e " ๆต‹่ฏ•ๆ—ถ้—ด: ${TOTAL_TIME}s"
114
+ echo -e " ๆ€ป่ฏทๆฑ‚ๆ•ฐ: ${TOTAL_REQUESTS}"
115
+ echo -e " ๅนถๅ‘ๆ•ฐ: ${CONCURRENCY}"
116
+ echo ""
117
+ echo -e " ๆˆๅŠŸ่ฏทๆฑ‚: ${GREEN}${SUCCESS_COUNT}${NC} ($(echo "scale=1; $SUCCESS_COUNT * 100 / $TOTAL_REQUESTS" | bc)%)"
118
+ echo -e " ๅคฑ่ดฅ่ฏทๆฑ‚: ${RED}${ERROR_COUNT}${NC} ($(echo "scale=1; $ERROR_COUNT * 100 / $TOTAL_REQUESTS" | bc)%)"
119
+ echo ""
120
+
121
+ # ่ฎก็ฎ—ๅžๅ้‡
122
+ THROUGHPUT=$(echo "scale=2; $TOTAL_REQUESTS / $TOTAL_TIME" | bc)
123
+ echo -e " ๅžๅ้‡: ${GREEN}${THROUGHPUT}${NC} req/s"
124
+ echo ""
125
+
126
+ # ๅปถ่ฟŸ็ปŸ่ฎก๏ผˆๅช็ปŸ่ฎกๆˆๅŠŸ็š„่ฏทๆฑ‚๏ผ‰
127
+ if [ $SUCCESS_COUNT -gt 0 ]; then
128
+ echo -e " ๅปถ่ฟŸ็ปŸ่ฎก:"
129
+
130
+ # ๆๅ–ๆˆๅŠŸ่ฏทๆฑ‚็š„ๅปถ่ฟŸๆ—ถ้—ด
131
+ awk -F',' '$2 == 200 {print $3}' "$TMP_DIR/results.csv" | sort -n > "$TMP_DIR/latencies.txt"
132
+
133
+ MIN=$(head -n 1 "$TMP_DIR/latencies.txt" | awk '{printf "%.0f", $1*1000}')
134
+ MAX=$(tail -n 1 "$TMP_DIR/latencies.txt" | awk '{printf "%.0f", $1*1000}')
135
+ AVG=$(awk '{sum+=$1; count++} END {printf "%.0f", sum/count*1000}' "$TMP_DIR/latencies.txt")
136
+
137
+ # P50
138
+ P50_LINE=$((SUCCESS_COUNT / 2))
139
+ P50=$(sed -n "${P50_LINE}p" "$TMP_DIR/latencies.txt" | awk '{printf "%.0f", $1*1000}')
140
+
141
+ # P95
142
+ P95_LINE=$(echo "scale=0; $SUCCESS_COUNT * 0.95 / 1" | bc)
143
+ P95=$(sed -n "${P95_LINE}p" "$TMP_DIR/latencies.txt" | awk '{printf "%.0f", $1*1000}')
144
+
145
+ # P99
146
+ P99_LINE=$(echo "scale=0; $SUCCESS_COUNT * 0.99 / 1" | bc)
147
+ P99=$(sed -n "${P99_LINE}p" "$TMP_DIR/latencies.txt" | awk '{printf "%.0f", $1*1000}')
148
+
149
+ echo -e " ๆœ€ๅฐ: ${MIN}ms"
150
+ echo -e " ๅนณๅ‡: ${AVG}ms"
151
+ echo -e " ๆœ€ๅคง: ${MAX}ms"
152
+ echo -e " P50: ${P50}ms"
153
+ echo -e " P95: ${P95}ms"
154
+ echo -e " P99: ${P99}ms"
155
+ fi
156
+
157
+ echo -e "${BLUE}โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•${NC}"
158
+
159
+ # ๆ€ง่ƒฝ่ฏ„็บง
160
+ echo -e "${YELLOW}๐ŸŽฏ ๆ€ง่ƒฝ่ฏ„็บง:${NC}"
161
+
162
+ if (( $(echo "$THROUGHPUT >= 100" | bc -l) )); then
163
+ RATING="โญโญโญโญโญ ไผ˜็ง€"
164
+ elif (( $(echo "$THROUGHPUT >= 60" | bc -l) )); then
165
+ RATING="โญโญโญโญ ่‰ฏๅฅฝ"
166
+ elif (( $(echo "$THROUGHPUT >= 30" | bc -l) )); then
167
+ RATING="โญโญโญ ไธญ็ญ‰"
168
+ elif (( $(echo "$THROUGHPUT >= 10" | bc -l) )); then
169
+ RATING="โญโญ ่พƒไฝŽ"
170
+ else
171
+ RATING="โญ ้œ€ไผ˜ๅŒ–"
172
+ fi
173
+
174
+ echo -e " ๅžๅ้‡ (${THROUGHPUT} req/s): ${RATING}"
175
+
176
+ echo ""
177
+ echo -e "${GREEN}โœ… ๆต‹่ฏ•ๅฎŒๆˆ๏ผ${NC}"
test_key.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import requests
2
+ import json
3
+ import uuid
4
+
5
+ # ================= ้…็ฝฎๅŒบ =================
6
+ # 1. ๅœจ็ฎก็†ๅŽๅฐ [Key ็ฎก็†] ้กต้ขๅˆ›ๅปบไธ€ไธชๆ–ฐ Key
7
+ # 2. ๅฐ†ๆ–ฐ็”Ÿๆˆ็š„ sk-... ๅกซๅ…ฅไธ‹ๆ–น
8
+ API_KEY = "YOUR_NEW_API_KEY"
9
+ BASE_URL = "http://127.0.0.1:8000"
10
+ # ==========================================
11
+
12
def test_chat_completion():
    """Smoke-test one non-streaming chat completion against the local API.

    Prints the HTTP status and either the assistant's reply or the error
    body; network/parse failures are caught and printed, never raised.
    """
    print(f"ๅผ€ๅง‹ๆต‹่ฏ• Key: {API_KEY[:10]}...")

    endpoint = f"{BASE_URL}/v1/chat/completions"
    request_headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json",
    }
    body = {
        "model": "grok-4-fast",
        "messages": [
            {"role": "user", "content": "Hello, who are you? Tell me a joke."}
        ],
        "stream": False,
    }

    try:
        resp = requests.post(endpoint, headers=request_headers, json=body, timeout=30)
        print(f"็Šถๆ€็ : {resp.status_code}")

        # Guard clause: report non-200 responses and stop.
        if resp.status_code != 200:
            print(f"่ฏทๆฑ‚ๅคฑ่ดฅ: {resp.text}")
            return

        answer = resp.json()['choices'][0]['message']['content']
        print("--- ๅ“ๅบ”ๆˆๅŠŸ ---")
        print(answer)
        print("---------------")
        print("ๆต‹่ฏ•้€š่ฟ‡๏ผ็ŽฐๅœจๅŽป็ฎก็†ๅŽๅฐ [ๆ—ฅๅฟ—ๅฎก่ฎก] ็กฎ่ฎคๆ—ฅๅฟ—ไธญๆ˜ฏๅฆ่ฎฐๅฝ•ไบ†่ฏฅ่ฏทๆฑ‚ใ€‚")

    except Exception as exc:
        print(f"ๅ‘็”Ÿ้”™่ฏฏ: {exc}")
45
+
46
if __name__ == "__main__":
    # Refuse to run while the placeholder key is still in place.
    if API_KEY != "YOUR_NEW_API_KEY":
        test_chat_completion()
    else:
        print("่ฏทๅ…ˆๅฐ†ไปฃ็ ไธญ็š„ API_KEY ๆ›ฟๆขไธบไฝ ๅˆšๆ‰็”Ÿๆˆ็š„ Key๏ผ")
uv.lock ADDED
The diff for this file is too large to render. See raw diff