ZyphrZero
committed on
Commit
·
c867606
1
Parent(s):
8d0dafa
🔧 refactor(helpers): 使用fake_useragent重构browser headers生成逻辑
Browse files- app/models/schemas.py +17 -4
- app/utils/helpers.py +86 -2
app/models/schemas.py
CHANGED
|
@@ -8,12 +8,14 @@ from pydantic import BaseModel
|
|
| 8 |
|
| 9 |
class ContentPart(BaseModel):
|
| 10 |
"""Content part model for OpenAI's new content format"""
|
|
|
|
| 11 |
type: str
|
| 12 |
text: Optional[str] = None
|
| 13 |
|
| 14 |
|
| 15 |
class Message(BaseModel):
|
| 16 |
"""Chat message model"""
|
|
|
|
| 17 |
role: str
|
| 18 |
content: Optional[Union[str, List[ContentPart]]] = None
|
| 19 |
reasoning_content: Optional[str] = None
|
|
@@ -22,6 +24,7 @@ class Message(BaseModel):
|
|
| 22 |
|
| 23 |
class OpenAIRequest(BaseModel):
|
| 24 |
"""OpenAI-compatible request model"""
|
|
|
|
| 25 |
model: str
|
| 26 |
messages: List[Message]
|
| 27 |
stream: Optional[bool] = False
|
|
@@ -33,6 +36,7 @@ class OpenAIRequest(BaseModel):
|
|
| 33 |
|
| 34 |
class ModelItem(BaseModel):
|
| 35 |
"""Model information item"""
|
|
|
|
| 36 |
id: str
|
| 37 |
name: str
|
| 38 |
owned_by: str
|
|
@@ -40,6 +44,7 @@ class ModelItem(BaseModel):
|
|
| 40 |
|
| 41 |
class UpstreamRequest(BaseModel):
|
| 42 |
"""Upstream service request model"""
|
|
|
|
| 43 |
stream: bool
|
| 44 |
model: str
|
| 45 |
messages: List[Message]
|
|
@@ -52,19 +57,21 @@ class UpstreamRequest(BaseModel):
|
|
| 52 |
model_item: Optional[ModelItem] = None
|
| 53 |
tool_servers: Optional[List[str]] = None
|
| 54 |
variables: Optional[Dict[str, str]] = None
|
| 55 |
-
model_config = {
|
| 56 |
|
| 57 |
|
| 58 |
class Delta(BaseModel):
|
| 59 |
"""Stream delta model"""
|
|
|
|
| 60 |
role: Optional[str] = None
|
| 61 |
-
content: Optional[str] = None
|
| 62 |
reasoning_content: Optional[str] = None
|
| 63 |
tool_calls: Optional[List[Dict[str, Any]]] = None
|
| 64 |
|
| 65 |
|
| 66 |
class Choice(BaseModel):
|
| 67 |
"""Response choice model"""
|
|
|
|
| 68 |
index: int
|
| 69 |
message: Optional[Message] = None
|
| 70 |
delta: Optional[Delta] = None
|
|
@@ -73,6 +80,7 @@ class Choice(BaseModel):
|
|
| 73 |
|
| 74 |
class Usage(BaseModel):
|
| 75 |
"""Token usage statistics"""
|
|
|
|
| 76 |
prompt_tokens: int = 0
|
| 77 |
completion_tokens: int = 0
|
| 78 |
total_tokens: int = 0
|
|
@@ -80,6 +88,7 @@ class Usage(BaseModel):
|
|
| 80 |
|
| 81 |
class OpenAIResponse(BaseModel):
|
| 82 |
"""OpenAI-compatible response model"""
|
|
|
|
| 83 |
id: str
|
| 84 |
object: str
|
| 85 |
created: int
|
|
@@ -90,17 +99,20 @@ class OpenAIResponse(BaseModel):
|
|
| 90 |
|
| 91 |
class UpstreamError(BaseModel):
|
| 92 |
"""Upstream error model"""
|
|
|
|
| 93 |
detail: str
|
| 94 |
code: int
|
| 95 |
|
| 96 |
|
| 97 |
class UpstreamDataInner(BaseModel):
|
| 98 |
"""Inner upstream data model"""
|
|
|
|
| 99 |
error: Optional[UpstreamError] = None
|
| 100 |
|
| 101 |
|
| 102 |
class UpstreamDataData(BaseModel):
|
| 103 |
"""Upstream data content model"""
|
|
|
|
| 104 |
delta_content: str = ""
|
| 105 |
edit_content: str = ""
|
| 106 |
phase: str = ""
|
|
@@ -112,6 +124,7 @@ class UpstreamDataData(BaseModel):
|
|
| 112 |
|
| 113 |
class UpstreamData(BaseModel):
|
| 114 |
"""Upstream data model"""
|
|
|
|
| 115 |
type: str
|
| 116 |
data: UpstreamDataData
|
| 117 |
error: Optional[UpstreamError] = None
|
|
@@ -119,6 +132,7 @@ class UpstreamData(BaseModel):
|
|
| 119 |
|
| 120 |
class Model(BaseModel):
|
| 121 |
"""Model information for listing"""
|
|
|
|
| 122 |
id: str
|
| 123 |
object: str = "model"
|
| 124 |
created: int
|
|
@@ -127,7 +141,6 @@ class Model(BaseModel):
|
|
| 127 |
|
| 128 |
class ModelsResponse(BaseModel):
|
| 129 |
"""Models list response model"""
|
|
|
|
| 130 |
object: str = "list"
|
| 131 |
data: List[Model]
|
| 132 |
-
|
| 133 |
-
|
|
|
|
| 8 |
|
| 9 |
class ContentPart(BaseModel):
    """Content part model for OpenAI's new content format"""

    # Part kind, e.g. "text" — other kinds exist in the OpenAI format,
    # but only text carries a payload here.
    type: str
    # Textual payload; None for non-text parts.
    text: Optional[str] = None
|
| 14 |
|
| 15 |
|
| 16 |
class Message(BaseModel):
|
| 17 |
"""Chat message model"""
|
| 18 |
+
|
| 19 |
role: str
|
| 20 |
content: Optional[Union[str, List[ContentPart]]] = None
|
| 21 |
reasoning_content: Optional[str] = None
|
|
|
|
| 24 |
|
| 25 |
class OpenAIRequest(BaseModel):
|
| 26 |
"""OpenAI-compatible request model"""
|
| 27 |
+
|
| 28 |
model: str
|
| 29 |
messages: List[Message]
|
| 30 |
stream: Optional[bool] = False
|
|
|
|
| 36 |
|
| 37 |
class ModelItem(BaseModel):
|
| 38 |
"""Model information item"""
|
| 39 |
+
|
| 40 |
id: str
|
| 41 |
name: str
|
| 42 |
owned_by: str
|
|
|
|
| 44 |
|
| 45 |
class UpstreamRequest(BaseModel):
|
| 46 |
"""Upstream service request model"""
|
| 47 |
+
|
| 48 |
stream: bool
|
| 49 |
model: str
|
| 50 |
messages: List[Message]
|
|
|
|
| 57 |
model_item: Optional[ModelItem] = None
|
| 58 |
tool_servers: Optional[List[str]] = None
|
| 59 |
variables: Optional[Dict[str, str]] = None
|
| 60 |
+
model_config = {"protected_namespaces": ()}
|
| 61 |
|
| 62 |
|
| 63 |
class Delta(BaseModel):
    """Stream delta model.

    Mirrors the OpenAI chat-completion chunk ``delta`` object: each chunk may
    carry any subset of these fields, so every field is optional.
    """

    role: Optional[str] = None
    # The original default was the dead expression `"" or None`, which always
    # evaluates to None — written plainly here.
    content: Optional[str] = None
    reasoning_content: Optional[str] = None
    tool_calls: Optional[List[Dict[str, Any]]] = None
|
| 70 |
|
| 71 |
|
| 72 |
class Choice(BaseModel):
|
| 73 |
"""Response choice model"""
|
| 74 |
+
|
| 75 |
index: int
|
| 76 |
message: Optional[Message] = None
|
| 77 |
delta: Optional[Delta] = None
|
|
|
|
| 80 |
|
| 81 |
class Usage(BaseModel):
|
| 82 |
"""Token usage statistics"""
|
| 83 |
+
|
| 84 |
prompt_tokens: int = 0
|
| 85 |
completion_tokens: int = 0
|
| 86 |
total_tokens: int = 0
|
|
|
|
| 88 |
|
| 89 |
class OpenAIResponse(BaseModel):
|
| 90 |
"""OpenAI-compatible response model"""
|
| 91 |
+
|
| 92 |
id: str
|
| 93 |
object: str
|
| 94 |
created: int
|
|
|
|
| 99 |
|
| 100 |
class UpstreamError(BaseModel):
    """Upstream error model"""

    # Human-readable error description from the upstream service.
    detail: str
    # Numeric error code reported by the upstream service.
    code: int
|
| 105 |
|
| 106 |
|
| 107 |
class UpstreamDataInner(BaseModel):
    """Inner upstream data model"""

    # Error payload when the upstream reports a failure; None on success.
    error: Optional[UpstreamError] = None
|
| 111 |
|
| 112 |
|
| 113 |
class UpstreamDataData(BaseModel):
|
| 114 |
"""Upstream data content model"""
|
| 115 |
+
|
| 116 |
delta_content: str = ""
|
| 117 |
edit_content: str = ""
|
| 118 |
phase: str = ""
|
|
|
|
| 124 |
|
| 125 |
class UpstreamData(BaseModel):
|
| 126 |
"""Upstream data model"""
|
| 127 |
+
|
| 128 |
type: str
|
| 129 |
data: UpstreamDataData
|
| 130 |
error: Optional[UpstreamError] = None
|
|
|
|
| 132 |
|
| 133 |
class Model(BaseModel):
|
| 134 |
"""Model information for listing"""
|
| 135 |
+
|
| 136 |
id: str
|
| 137 |
object: str = "model"
|
| 138 |
created: int
|
|
|
|
| 141 |
|
| 142 |
class ModelsResponse(BaseModel):
    """Models list response model"""

    # Fixed discriminator matching the OpenAI list envelope.
    object: str = "list"
    # The models exposed by this service.
    data: List[Model]
|
|
|
|
|
|
app/utils/helpers.py
CHANGED
|
@@ -5,11 +5,23 @@ Utility functions for the application
|
|
| 5 |
import json
|
| 6 |
import re
|
| 7 |
import time
|
|
|
|
| 8 |
from typing import Dict, List, Optional, Any, Tuple, Generator
|
| 9 |
import requests
|
|
|
|
| 10 |
|
| 11 |
from app.core.config import settings
|
| 12 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13 |
|
| 14 |
def debug_log(message: str, *args) -> None:
|
| 15 |
"""Log debug message if debug mode is enabled"""
|
|
@@ -29,12 +41,84 @@ def generate_request_ids() -> Tuple[str, str]:
|
|
| 29 |
|
| 30 |
|
| 31 |
def get_browser_headers(referer_chat_id: str = "") -> Dict[str, str]:
|
| 32 |
-
"""Get browser headers for API requests"""
|
| 33 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 34 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 35 |
if referer_chat_id:
|
| 36 |
headers["Referer"] = f"{settings.CLIENT_HEADERS['Origin']}/c/{referer_chat_id}"
|
| 37 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 38 |
return headers
|
| 39 |
|
| 40 |
|
|
|
|
| 5 |
import json
|
| 6 |
import re
|
| 7 |
import time
|
| 8 |
+
import random
|
| 9 |
from typing import Dict, List, Optional, Any, Tuple, Generator
|
| 10 |
import requests
|
| 11 |
+
from fake_useragent import UserAgent
|
| 12 |
|
| 13 |
from app.core.config import settings
|
| 14 |
|
| 15 |
+
# Module-level UserAgent instance so the underlying browser-data cache is
# built once instead of on every call.
_user_agent_instance = None


def get_user_agent_instance() -> UserAgent:
    """Return the shared UserAgent instance, creating it lazily (singleton)."""
    global _user_agent_instance
    if _user_agent_instance is None:
        # First use: construct the fake_useragent instance (loads its
        # browser-data source, which can be slow — hence the caching).
        _user_agent_instance = UserAgent()
    return _user_agent_instance
|
| 24 |
+
|
| 25 |
|
| 26 |
def debug_log(message: str, *args) -> None:
|
| 27 |
"""Log debug message if debug mode is enabled"""
|
|
|
|
| 41 |
|
| 42 |
|
| 43 |
def get_browser_headers(referer_chat_id: str = "") -> Dict[str, str]:
    """Get browser headers for API requests with a dynamic User-Agent.

    Picks a random User-Agent (weighted towards Chrome/Edge) via
    fake_useragent and derives a matching ``sec-ch-ua`` client hint for
    Chromium-based browsers.

    Args:
        referer_chat_id: When non-empty, a ``Referer`` header pointing at
            ``{Origin}/c/{referer_chat_id}`` is included.

    Returns:
        Mapping of HTTP header names to values.
    """
    ua = get_user_agent_instance()

    # Weighted random pick, deliberately biased towards Chrome and Edge.
    browser_choices = ['chrome', 'chrome', 'chrome', 'edge', 'edge', 'firefox', 'safari']
    browser_type = random.choice(browser_choices)

    try:
        # Ask fake_useragent for a UA string of the chosen browser family.
        if browser_type == 'chrome':
            user_agent = ua.chrome
        elif browser_type == 'edge':
            user_agent = ua.edge
        elif browser_type == 'firefox':
            user_agent = ua.firefox
        elif browser_type == 'safari':
            user_agent = ua.safari
        else:
            user_agent = ua.random
    except Exception:
        # fake_useragent can raise when its data source is unavailable;
        # fall back to a random UA. (Was a bare `except:`, which also
        # swallowed KeyboardInterrupt/SystemExit.)
        user_agent = ua.random

    # Major-version numbers used to build the sec-ch-ua client hint.
    chrome_version = "139"  # defaults used when the UA string cannot be parsed
    edge_version = "139"

    if "Chrome/" in user_agent:
        try:
            chrome_version = user_agent.split("Chrome/")[1].split(".")[0]
        except IndexError:
            pass

    if "Edg/" in user_agent:
        try:
            edge_version = user_agent.split("Edg/")[1].split(".")[0]
            # Edge is Chromium-based and advertises an Edge-specific sec-ch-ua.
            sec_ch_ua = f'"Microsoft Edge";v="{edge_version}", "Chromium";v="{chrome_version}", "Not_A Brand";v="24"'
        except IndexError:
            # Parsing failed — fall back to the generic Chromium hint.
            sec_ch_ua = f'"Not_A Brand";v="8", "Chromium";v="{chrome_version}", "Google Chrome";v="{chrome_version}"'
    elif "Firefox/" in user_agent:
        # Firefox does not send sec-ch-ua client hints.
        sec_ch_ua = None
    else:
        # Chrome or another Chromium-based browser.
        sec_ch_ua = f'"Not_A Brand";v="8", "Chromium";v="{chrome_version}", "Google Chrome";v="{chrome_version}"'

    # Base header set; values other than User-Agent / sec-ch-ua are static.
    headers = {
        "Content-Type": "application/json",
        "Accept": "application/json, text/event-stream",
        "User-Agent": user_agent,
        "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,en-US;q=0.7",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": '"Windows"',
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "X-FE-Version": "prod-fe-1.0.70",
        "Origin": settings.CLIENT_HEADERS["Origin"],
        "Cache-Control": "no-cache",
        "Pragma": "no-cache",
    }

    # Only Chromium-based browsers send sec-ch-ua (None for Firefox).
    if sec_ch_ua:
        headers["sec-ch-ua"] = sec_ch_ua

    # Point the Referer at the originating chat page when one is known.
    if referer_chat_id:
        headers["Referer"] = f"{settings.CLIENT_HEADERS['Origin']}/c/{referer_chat_id}"

    if settings.DEBUG_LOGGING:
        debug_log(f"使用 User-Agent: {user_agent[:100]}...")

    return headers
|
| 123 |
|
| 124 |
|