Rfym21 commited on
Commit
4e9117c
·
verified ·
1 Parent(s): f33fcab

Delete chatgpt

Browse files
chatgpt/authorization.py DELETED
@@ -1,101 +0,0 @@
1
- import asyncio
2
- import json
3
- import os
4
- import random
5
-
6
- import ua_generator
7
- from fastapi import HTTPException
8
-
9
- import chatgpt.globals as globals
10
- from chatgpt.refreshToken import rt2ac
11
- from utils.Logger import logger
12
- from utils.config import authorization_list, random_token
13
-
14
- os.environ['PYTHONHASHSEED'] = '0'
15
- random.seed(0)
16
-
17
-
18
def get_req_token(req_token, seed=None):
    """Resolve the token to use for an upstream request.

    Behavior:
      * ``seed`` given: deterministically map the seed onto one of the
        currently available (non-error) tokens, so the same seed keeps
        hitting the same account.
      * ``req_token`` is one of the configured ``authorization_list`` keys:
        hand out a pooled token instead — random or round-robin depending on
        the ``random_token`` setting; ``None`` when the pool is empty.
      * otherwise: the caller supplied a concrete token — pass it through.
    """
    available_token_list = list(set(globals.token_list) - set(globals.error_token_list))
    length = len(available_token_list)

    if seed and length > 0:
        # Index the *available* list directly. The original indexed the full
        # token_list with a modulus taken over the available list's length and
        # then re-rolled randomly whenever it hit an error token, which broke
        # the deterministic seed -> token mapping.
        return available_token_list[hash(seed) % length]

    if req_token in authorization_list:
        if length > 0:
            if random_token:
                return random.choice(available_token_list)
            # Round-robin over the available pool.
            globals.count = (globals.count + 1) % length
            return available_token_list[globals.count]
        return None
    return req_token
40
-
41
-
42
def get_ua(req_token):
    """Return the header dict (UA + client hints + impersonate target) for a token.

    Known tokens are served from ``globals.user_agent_map``; unknown tokens get
    a freshly generated desktop Chrome/Edge fingerprint, which is cached and
    persisted when the token is non-empty so it stays stable across requests.
    """
    # Bugfix: the original defaulted to "" here, and str has no .items(), so
    # any token missing from the map crashed with AttributeError.
    user_agent = globals.user_agent_map.get(req_token, {})
    user_agent = {k.lower(): v for k, v in user_agent.items()}
    if user_agent:
        return user_agent

    # Generate one fingerprint (the original duplicated this dict verbatim in
    # both the anonymous and the cached branch).
    ua = ua_generator.generate(device='desktop', browser=('chrome', 'edge'), platform=('windows', 'macos'))
    user_agent = {
        "user-agent": ua.text,
        "sec-ch-ua-platform": ua.platform,
        "sec-ch-ua": ua.ch.brands,
        "sec-ch-ua-mobile": ua.ch.mobile,
        "impersonate": random.choice(globals.impersonate_list),
    }
    if req_token:
        globals.user_agent_map[req_token] = user_agent
        # NOTE(review): appending independent json.dumps() objects produces a
        # file that is not itself valid JSON; kept as-is for compatibility
        # with the existing on-disk format — confirm before changing.
        with open(globals.USER_AGENTS_FILE, "a", encoding="utf-8") as f:
            f.write(json.dumps({req_token: user_agent}, indent=4))
    return user_agent
70
-
71
-
72
async def verify_token(req_token):
    """Validate/convert an incoming token into an upstream access token.

    * empty token: allowed only when no ``authorization_list`` is configured
      (otherwise 401).
    * JWT-looking ("eyJhbGciOi...") or "fk-..." tokens are used as-is.
    * 45-character tokens are treated as refresh tokens and exchanged via
      ``rt2ac`` (which raises HTTPException on failure).
    * anything else is passed through unchanged.
    """
    if not req_token:
        if authorization_list:
            logger.error("Unauthorized with empty token.")
            raise HTTPException(status_code=401)
        return None
    if req_token.startswith("eyJhbGciOi") or req_token.startswith("fk-"):
        return req_token
    if len(req_token) == 45:
        # rt2ac already raises HTTPException on failure; the original caught
        # it only to re-raise a reconstructed copy (losing any extra attrs
        # such as headers) — just let it propagate.
        return await rt2ac(req_token, force_refresh=False)
    return req_token
91
-
92
-
93
async def refresh_all_tokens(force_refresh=False):
    """Walk every usable refresh token and warm its access-token cache.

    Pauses 2s between tokens to avoid hammering the auth endpoint; tokens
    whose refresh fails are skipped silently (rt2ac records them).
    """
    usable_tokens = set(globals.token_list) - set(globals.error_token_list)
    for refresh_token in list(usable_tokens):
        if len(refresh_token) != 45:
            continue
        await asyncio.sleep(2)
        try:
            await rt2ac(refresh_token, force_refresh=force_refresh)
        except HTTPException:
            pass
    logger.info("All tokens refreshed.")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
chatgpt/chatLimit.py DELETED
@@ -1,35 +0,0 @@
1
- import threading
2
- import time
3
- from datetime import datetime
4
-
5
- from utils.Logger import logger
6
-
7
- limit_details = {}
8
-
9
-
10
def check_is_limit(detail, token, model):
    """Record a per-token/model rate limit when `detail` carries `clears_in`."""
    if not token or not isinstance(detail, dict):
        return
    clears_in = detail.get('clears_in')
    if not clears_in:
        return
    clear_time = int(time.time()) + clears_in
    limit_details.setdefault(token, {})[model] = clear_time
    logger.info(f"{token[:40]}: Reached {model} limit, will be cleared at {datetime.fromtimestamp(clear_time).replace(microsecond=0)}")
15
-
16
-
17
async def handle_request_limit(token, model):
    """Return a human-readable message while `token` is limited on `model`.

    Returns None when no (or an expired) limit is recorded; expired entries
    are removed as a side effect.
    """
    try:
        model_limits = limit_details.get(token)
        if not model_limits or model not in model_limits:
            return None
        limit_time = model_limits[model]
        if limit_time > int(time.time()):
            clear_date = datetime.fromtimestamp(limit_time).replace(microsecond=0)
            result = f"Request limit exceeded. You can continue with the default model now, or try again after {clear_date}"
            logger.info(result)
            return result
        # Limit window has passed — drop the stale entry.
        del model_limits[model]
        return None
    except KeyError as e:
        logger.error(f"Key error: {e}")
        return None
    except Exception as e:
        logger.error(f"An unexpected error occurred: {e}")
        return None
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
chatgpt/globals.py DELETED
@@ -1,105 +0,0 @@
1
- import json
2
- import os
3
-
4
- import ua_generator
5
- import random
6
-
7
- from utils.Logger import logger
8
-
9
# ---------------------------------------------------------------------------
# Shared mutable state for the chatgpt package, initialized at import time.
# Other chatgpt.* modules mutate these objects in place (token pools, the
# refresh/wss caches, the per-token UA fingerprint map).
# ---------------------------------------------------------------------------

DATA_FOLDER = "data"
TOKENS_FILE = os.path.join(DATA_FOLDER, "token.txt")
REFRESH_MAP_FILE = os.path.join(DATA_FOLDER, "refresh_map.json")
ERROR_TOKENS_FILE = os.path.join(DATA_FOLDER, "error_token.txt")
WSS_MAP_FILE = os.path.join(DATA_FOLDER, "wss_map.json")
USER_AGENTS_FILE = os.path.join(DATA_FOLDER, "user_agents.json")

count = 0                # round-robin cursor for pooled token selection
token_list = []          # every configured token, in file order
error_token_list = []    # tokens that failed refresh; excluded from the pool
refresh_map = {}         # refresh_token -> {"token": ..., "timestamp": ...}
wss_map = {}             # token -> {"timestamp": ..., "wss_url": ..., "wss_mode": ...}
user_agent_map = {}      # token -> generated browser-fingerprint header dict
# Browser profiles for the "impersonate" field of generated UA dicts
# (presumably curl_cffi impersonation targets — confirm against utils.Client).
impersonate_list = [
    "chrome99",
    "chrome100",
    "chrome101",
    "chrome104",
    "chrome107",
    "chrome110",
    "chrome116",
    "chrome119",
    "chrome120",
    "chrome123",
    "edge99",
    "edge101",
]

if not os.path.exists(DATA_FOLDER):
    os.makedirs(DATA_FOLDER)

# Load the persisted refresh-token cache, if any.
if os.path.exists(REFRESH_MAP_FILE):
    with open(REFRESH_MAP_FILE, "r") as file:
        refresh_map = json.load(file)
else:
    refresh_map = {}

# Load the persisted wss endpoint cache, if any.
if os.path.exists(WSS_MAP_FILE):
    with open(WSS_MAP_FILE, "r") as file:
        wss_map = json.load(file)
else:
    wss_map = {}


# Tokens are one per line; '#' lines are comments. Create the file if missing.
if os.path.exists(TOKENS_FILE):
    with open(TOKENS_FILE, "r", encoding="utf-8") as f:
        for line in f:
            if line.strip() and not line.startswith("#"):
                token_list.append(line.strip())
else:
    with open(TOKENS_FILE, "w", encoding="utf-8") as f:
        pass


# Same format for the error-token list.
if os.path.exists(ERROR_TOKENS_FILE):
    with open(ERROR_TOKENS_FILE, "r", encoding="utf-8") as f:
        for line in f:
            if line.strip() and not line.startswith("#"):
                error_token_list.append(line.strip())
else:
    with open(ERROR_TOKENS_FILE, "w", encoding="utf-8") as f:
        pass

if os.path.exists(USER_AGENTS_FILE):
    with open(USER_AGENTS_FILE, "r", encoding="utf-8") as f:
        user_agent_map = json.load(f)
    # Regenerate UAs when the token count changed since the map was saved.
    if len(user_agent_map.keys()) != len(token_list):
        new_tokens = list(set(token_list) - user_agent_map.keys())
        for token in new_tokens:
            ua = ua_generator.generate(device='desktop', browser=('chrome', 'edge'), platform=('windows', 'macos'))
            ua_dict = {
                "user-agent": ua.text,
                "sec-ch-ua-platform": ua.platform,
                "sec-ch-ua": ua.ch.brands,
                "sec-ch-ua-mobile": ua.ch.mobile,
                "impersonate": random.choice(impersonate_list),
            }
            user_agent_map[token] = ua_dict
        with open(USER_AGENTS_FILE, "w", encoding="utf-8") as f:
            f.write(json.dumps(user_agent_map, indent=4))
else:
    # First run: generate a fingerprint for every configured token.
    for token in token_list:
        ua = ua_generator.generate(device='desktop', browser=('chrome', 'edge'), platform=('windows', 'macos'))
        ua_dict = {
            "user-agent": ua.text,
            "sec-ch-ua-platform": ua.platform,
            "sec-ch-ua": ua.ch.brands,
            "sec-ch-ua-mobile": ua.ch.mobile,
            "impersonate": random.choice(impersonate_list),
        }
        user_agent_map[token] = ua_dict
    with open(USER_AGENTS_FILE, "w", encoding="utf-8") as f:
        f.write(json.dumps(user_agent_map, indent=4))

if token_list:
    logger.info(f"Token list count: {len(token_list)}, Error token list count: {len(error_token_list)}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
chatgpt/refreshToken.py DELETED
@@ -1,61 +0,0 @@
1
- import json
2
- import random
3
- import time
4
-
5
- from fastapi import HTTPException
6
-
7
- from utils.Client import Client
8
- from utils.Logger import logger
9
- from utils.config import proxy_url_list
10
- import chatgpt.globals as globals
11
-
12
-
13
def save_refresh_map(refresh_map):
    """Persist the refresh-token -> access-token cache to disk."""
    serialized = json.dumps(refresh_map)
    with open(globals.REFRESH_MAP_FILE, "w") as fp:
        fp.write(serialized)
16
-
17
-
18
async def rt2ac(refresh_token, force_refresh=False):
    """Exchange a refresh token for an access token, with a 5-day cache.

    Cached entries younger than 5 days are reused unless ``force_refresh``.
    Raises HTTPException (propagated from ``chat_refresh``) on failure.
    """
    cached = globals.refresh_map.get(refresh_token)
    if not force_refresh and cached and int(time.time()) - cached.get("timestamp", 0) < 5 * 24 * 60 * 60:
        access_token = cached["token"]
        logger.info(f"refresh_token -> access_token from cache")
        return access_token

    # chat_refresh raises HTTPException itself; the original caught it only to
    # re-raise an identical copy, which is redundant — let it propagate.
    access_token = await chat_refresh(refresh_token)
    globals.refresh_map[refresh_token] = {"token": access_token, "timestamp": int(time.time())}
    save_refresh_map(globals.refresh_map)
    logger.info(f"refresh_token -> access_token with openai: {access_token}")
    return access_token
32
-
33
-
34
async def chat_refresh(refresh_token):
    """POST the refresh token to auth0.openai.com and return a new access token.

    Raises HTTPException(500) on any failure. Tokens rejected with
    invalid_grant/access_denied are remembered in the error-token list and
    appended to the error-token file so later requests skip them.
    """
    data = {
        # Fixed OAuth client id; the redirect_uri suggests the OpenAI iOS app
        # flow — TODO confirm.
        "client_id": "pdlLIX2Y72MIl2rhLhTE9VV9bN905kBh",
        "grant_type": "refresh_token",
        "redirect_uri": "com.openai.chat://auth0.openai.com/ios/com.openai.chat/callback",
        "refresh_token": refresh_token
    }
    client = Client(proxy=random.choice(proxy_url_list) if proxy_url_list else None)
    try:
        r = await client.post("https://auth0.openai.com/oauth/token", json=data, timeout=5)
        if r.status_code == 200:
            access_token = r.json()['access_token']
            return access_token
        else:
            # invalid_grant / access_denied means the token is dead for good:
            # record it so the pool stops using it.
            if "invalid_grant" in r.text or "access_denied" in r.text:
                if refresh_token not in globals.error_token_list:
                    globals.error_token_list.append(refresh_token)
                    with open(globals.ERROR_TOKENS_FILE, "a", encoding="utf-8") as f:
                        f.write(refresh_token + "\n")
                raise Exception(r.text)
            else:
                # Transient/unknown failure: keep the message short for logs.
                raise Exception(r.text[:300])
    except Exception as e:
        # All failures surface as a 500 to the caller; details go to the log.
        logger.error(f"Failed to refresh access_token `{refresh_token}`: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to refresh access_token.")
    finally:
        await client.close()
        del client
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
chatgpt/reverseProxy.py DELETED
@@ -1,159 +0,0 @@
1
- import json
2
- import random
3
-
4
- from fastapi import Request, HTTPException
5
- from fastapi.responses import StreamingResponse, Response
6
- from starlette.background import BackgroundTask
7
-
8
- from chatgpt.authorization import verify_token, get_req_token, get_ua
9
- from utils.Client import Client
10
- from utils.config import chatgpt_base_url_list, proxy_url_list, enable_gateway
11
-
12
# Request headers never forwarded upstream: proxy/CDN artifacts that would
# leak the real client or gateway, plus hop-by-hop and CSP headers.
# Membership is tested case-insensitively by the proxy (keys are lowered
# before the `in` check). The original list repeated 11 entries; duplicates
# are removed here — membership semantics are unchanged.
headers_reject_list = [
    "x-real-ip",
    "x-forwarded-for",
    "x-forwarded-proto",
    "x-forwarded-port",
    "x-forwarded-host",
    "x-forwarded-server",
    "cf-warp-tag-id",
    "cf-visitor",
    "cf-ray",
    "cf-connecting-ip",
    "cf-ipcountry",
    "cdn-loop",
    "remote-host",
    "x-frame-options",
    "x-xss-protection",
    "x-content-type-options",
    "content-security-policy",
    "host",
    "cookie",
    "connection",
    "content-length",
    "content-encoding",
    "x-middleware-prefetch",
    "x-nextjs-data",
    "purpose",
    "x-forwarded-uri",
    "x-forwarded-path",
    "x-forwarded-method",
    "x-forwarded-protocol",
    "x-forwarded-scheme",
    "cf-request-id",
    "cf-worker",
    "cf-access-client-id",
    "cf-access-client-device-type",
    "cf-access-client-device-model",
    "cf-access-client-device-name",
    "cf-access-client-device-brand",
]
62
-
63
-
64
async def get_real_req_token(token):
    """Map an incoming token onto a concrete upstream token.

    A 45-char refresh token or a JWT-looking token is used directly; anything
    else is treated as a seed to deterministically pick a pooled token.
    """
    candidate = get_req_token(token)
    if len(candidate) == 45 or candidate.startswith("eyJhbGciOi"):
        return candidate
    return get_req_token(None, token)
71
-
72
-
73
async def chatgpt_reverse_proxy(request: Request, path: str):
    """Forward an incoming request to chatgpt.com (or its CDN hosts) and relay
    the response, rewriting host-specific URLs in non-streaming bodies back to
    this gateway's host so the proxied web app keeps working.
    """
    try:
        origin_host = request.url.netloc
        # "petrol" holds the outward-facing scheme (http/https) of the
        # original request, preferring proxy-supplied headers over the URL.
        if request.url.is_secure:
            petrol = "https"
        else:
            petrol = "http"
        if "x-forwarded-proto" in request.headers:
            petrol = request.headers["x-forwarded-proto"]
        if "cf-visitor" in request.headers:
            # Behind Cloudflare the real scheme lives in the cf-visitor JSON.
            cf_visitor = json.loads(request.headers["cf-visitor"])
            petrol = cf_visitor.get("scheme", petrol)

        params = dict(request.query_params)
        request_cookies = dict(request.cookies)

        # Drop identifying / hop-by-hop headers before forwarding.
        headers = {
            key: value for key, value in request.headers.items()
            if (key.lower() not in ["host", "origin", "referer", "priority", "oai-device-id"] and key.lower() not in headers_reject_list)
        }

        base_url = random.choice(chatgpt_base_url_list) if chatgpt_base_url_list else "https://chatgpt.com"
        # Static assets and user file content live on dedicated hosts.
        if "assets/" in path:
            base_url = "https://cdn.oaistatic.com"
        if "file-" in path and "backend-api" not in path:
            base_url = "https://files.oaiusercontent.com"

        token = request.cookies.get("token")
        req_token = await get_real_req_token(token)
        # Attach the stable per-token browser fingerprint headers.
        ua = get_ua(req_token)
        headers.update(ua)

        headers.update({
            "accept-language": "en-US,en;q=0.9",
            "host": base_url.replace("https://", "").replace("http://", ""),
            "origin": base_url,
            "referer": f"{base_url}/"
        })

        # An Authorization header, when present, overrides the cookie token.
        token = headers.get("authorization", "").replace("Bearer ", "")
        if token:
            req_token = await get_real_req_token(token)
            access_token = await verify_token(req_token)
            headers.update({"authorization": access_token})

        data = await request.body()

        client = Client(proxy=random.choice(proxy_url_list) if proxy_url_list else None)
        try:
            # The client is closed only after the response body is consumed.
            background = BackgroundTask(client.close)
            r = await client.request(request.method, f"{base_url}/{path}", params=params, headers=headers,
                                     cookies=request_cookies, data=data, stream=True, allow_redirects=False)

            if r.status_code == 302:
                # Rewrite redirects to point back at this gateway.
                # NOTE(review): r.headers.get("Location") may be None, which
                # would raise here — confirm upstream always sends it on 302.
                return Response(status_code=302,
                                headers={"Location": r.headers.get("Location").replace("chatgpt.com", origin_host)
                                .replace("cdn.oaistatic.com", origin_host)
                                .replace("https", petrol)}, background=background)
            elif 'stream' in r.headers.get("content-type", ""):
                # SSE / streaming responses are piped through untouched.
                return StreamingResponse(r.aiter_content(), media_type=r.headers.get("content-type", ""),
                                         background=background)
            else:
                if "/backend-api/conversation" in path or "/register-websocket" in path:
                    # API payloads are relayed verbatim (no URL rewriting).
                    response = Response(content=(await r.atext()), media_type=r.headers.get("content-type"),
                                        status_code=r.status_code, background=background)
                else:
                    # Rewrite absolute upstream URLs in the body to this host.
                    content = ((await r.atext()).replace("chatgpt.com", origin_host)
                               .replace("cdn.oaistatic.com", origin_host)
                               # .replace("files.oaiusercontent.com", origin_host)
                               .replace("https", petrol))
                    rheaders = dict(r.headers)
                    content_type = rheaders.get("content-type", "")
                    cache_control = rheaders.get("cache-control", "")
                    expires = rheaders.get("expires", "")
                    # Only caching/content headers are relayed downstream.
                    rheaders = {
                        "cache-control": cache_control,
                        "content-type": content_type,
                        "expires": expires
                    }
                    response = Response(content=content, headers=rheaders,
                                        status_code=r.status_code, background=background)
                return response
        except Exception:
            # NOTE(review): this swallows the upstream error and lets the
            # function return None (the framework then errors) — consider
            # re-raising after the close.
            await client.close()

    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
chatgpt/turnstile.py DELETED
@@ -1,268 +0,0 @@
1
- import pybase64
2
- import json
3
- import random
4
- import time
5
- from typing import Any, Callable, Dict, List, Union
6
-
7
-
8
class OrderedMap:
    """A mapping that remembers first-insertion order and ignores re-adds.

    Emulates the JS object used by the turnstile script; serializes to a JSON
    object whose keys appear in insertion order.
    """

    def __init__(self):
        self.keys = []
        self.values = {}

    def add(self, key: str, value: Any):
        """Insert key -> value; a key that is already present is left untouched."""
        if key in self.values:
            return
        self.values[key] = value
        self.keys.append(key)

    def to_json(self):
        """Serialize the entries as a JSON object in insertion order."""
        ordered = {key: self.values[key] for key in self.keys}
        return json.dumps(ordered)
20
-
21
-
22
# Type aliases describing the turnstile token VM below.
TurnTokenList = List[List[Any]]  # decoded program: list of [opcode, *operands]
FloatMap = Dict[float, Any]      # opcode/register id -> handler or value
StringMap = Dict[str, Any]       # generic string-keyed map
FuncType = Callable[..., Any]    # opcode handler signature
26
-
27
-
28
def get_turnstile_token(dx: str, p: str) -> Union[str, None]:
    """Base64-decode `dx` and XOR-deobfuscate it with key `p`.

    Returns None (after printing the error) when decoding fails.
    """
    try:
        raw = pybase64.b64decode(dx)
        return process_turnstile_token(raw.decode(), p)
    except Exception as e:
        print(f"Error in get_turnstile_token: {e}")
        return None
35
-
36
-
37
def process_turnstile_token(dx: str, p: str) -> str:
    """XOR each character of `dx` with the repeating key `p`.

    An empty key means no transformation (the input is returned unchanged).
    """
    if not p:
        return dx
    key_len = len(p)
    return ''.join(chr(ord(ch) ^ ord(p[idx % key_len])) for idx, ch in enumerate(dx))
46
-
47
-
48
def is_slice(input_val: Any) -> bool:
    """Return True when the VM value is a sequence (list or tuple)."""
    return isinstance(input_val, (list, tuple))
50
-
51
-
52
def is_float(input_val: Any) -> bool:
    """Return True when the VM value is a float (JS-style number)."""
    return isinstance(input_val, float)
54
-
55
-
56
def is_string(input_val: Any) -> bool:
    """Return True when the VM value is a string."""
    return isinstance(input_val, str)
58
-
59
-
60
def to_str(input_val: Any) -> str:
    """Stringify a VM value the way the browser-side script would.

    None becomes "undefined"; a handful of known window.* property paths map
    to their JS toString() forms; a list of strings joins with commas;
    everything else falls back to str().
    """
    if input_val is None:
        return "undefined"
    if isinstance(input_val, float):
        return str(input_val)
    if isinstance(input_val, str):
        special_cases = {
            "window.Math": "[object Math]",
            "window.Reflect": "[object Reflect]",
            "window.performance": "[object Performance]",
            "window.localStorage": "[object Storage]",
            "window.Object": "function Object() { [native code] }",
            "window.Reflect.set": "function set() { [native code] }",
            "window.performance.now": "function () { [native code] }",
            "window.Object.create": "function create() { [native code] }",
            "window.Object.keys": "function keys() { [native code] }",
            "window.Math.random": "function random() { [native code] }"
        }
        return special_cases.get(input_val, input_val)
    if isinstance(input_val, list) and all(isinstance(item, str) for item in input_val):
        return ','.join(input_val)
    return str(input_val)
83
-
84
-
85
def get_func_map() -> FloatMap:
    """Build the opcode -> handler table for the turnstile token VM.

    The returned map doubles as the VM's register file: numeric keys hold both
    opcode handlers and the values the program stores as it runs (e.g. key 10
    is pre-seeded with the string "window"). All handlers close over
    `process_map` and mutate it in place.
    """
    process_map: FloatMap = {}

    def func_1(e: float, t: float):
        # reg[e] = XOR-cipher(reg[e], reg[t]) — same cipher as the outer token.
        e_str = to_str(process_map[e])
        t_str = to_str(process_map[t])
        res = process_turnstile_token(e_str, t_str)
        process_map[e] = res

    def func_2(e: float, t: Any):
        # reg[e] = immediate value t.
        process_map[e] = t

    def func_5(e: float, t: float):
        # reg[e] += reg[t]: list append, string concat, or numeric add
        # (mirrors JS "+" semantics, with "NaN" for unsupported pairs).
        n = process_map[e]
        tres = process_map[t]
        if is_slice(n):
            nt = n + [tres]
            process_map[e] = nt
        else:
            if is_string(n) or is_string(tres):
                res = to_str(n) + to_str(tres)
            elif is_float(n) and is_float(tres):
                res = n + tres
            else:
                res = "NaN"
            process_map[e] = res

    def func_6(e: float, t: float, n: float):
        # reg[e] = "reg[t].reg[n]" (a JS property path), spoofing
        # document.location as the real chatgpt.com origin.
        tv = process_map[t]
        nv = process_map[n]
        if is_string(tv) and is_string(nv):
            res = f"{tv}.{nv}"
            if res == "window.document.location":
                process_map[e] = "https://chatgpt.com/"
            else:
                process_map[e] = res
        else:
            print("func type 6 error")

    def func_24(e: float, t: float, n: float):
        # Like func_6 but without the location special case.
        tv = process_map[t]
        nv = process_map[n]
        if is_string(tv) and is_string(nv):
            process_map[e] = f"{tv}.{nv}"
        else:
            print("func type 24 error")

    def func_7(e: float, *args):
        # Invoke reg[e] with reg[args]; emulates window.Reflect.set by
        # writing into an OrderedMap operand.
        n = [process_map[arg] for arg in args]
        ev = process_map[e]
        if isinstance(ev, str):
            if ev == "window.Reflect.set":
                obj = n[0]
                key_str = str(n[1])
                val = n[2]
                obj.add(key_str, val)
        elif callable(ev):
            ev(*n)

    def func_17(e: float, t: float, *args):
        # reg[e] = reg[t](reg[args]); emulates a few browser globals.
        i = [process_map[arg] for arg in args]
        tv = process_map[t]
        res = None
        if isinstance(tv, str):
            if tv == "window.performance.now":
                # Milliseconds elapsed since process_turnstile() set
                # start_time, with sub-ms jitter.
                current_time = time.time_ns()
                elapsed_ns = current_time - int(start_time * 1e9)
                res = (elapsed_ns + random.random()) / 1e6
            elif tv == "window.Object.create":
                res = OrderedMap()
            elif tv == "window.Object.keys":
                if isinstance(i[0], str) and i[0] == "window.localStorage":
                    # Canned localStorage keys mimicking a real session.
                    res = ["STATSIG_LOCAL_STORAGE_INTERNAL_STORE_V4", "STATSIG_LOCAL_STORAGE_STABLE_ID",
                           "client-correlated-secret", "oai/apps/capExpiresAt", "oai-did",
                           "STATSIG_LOCAL_STORAGE_LOGGING_REQUEST", "UiState.isNavigationCollapsed.1"]
            elif tv == "window.Math.random":
                res = random.random()
        elif callable(tv):
            res = tv(*i)
        process_map[e] = res

    def func_8(e: float, t: float):
        # reg[e] = reg[t] (plain copy).
        process_map[e] = process_map[t]

    def func_14(e: float, t: float):
        # reg[e] = JSON.parse(reg[t]).
        tv = process_map[t]
        if is_string(tv):
            token_list = json.loads(tv)
            process_map[e] = token_list
        else:
            print("func type 14 error")

    def func_15(e: float, t: float):
        # reg[e] = JSON.stringify(reg[t]).
        tv = process_map[t]
        process_map[e] = json.dumps(tv)

    def func_18(e: float):
        # reg[e] = atob(reg[e]) — in-place base64 decode.
        ev = process_map[e]
        e_str = to_str(ev)
        decoded = pybase64.b64decode(e_str).decode()
        process_map[e] = decoded

    def func_19(e: float):
        # reg[e] = btoa(reg[e]) — in-place base64 encode.
        ev = process_map[e]
        e_str = to_str(ev)
        encoded = pybase64.b64encode(e_str.encode()).decode()
        process_map[e] = encoded

    def func_20(e: float, t: float, n: float, *args):
        # If reg[e] == reg[t], call reg[n] with reg[args].
        o = [process_map[arg] for arg in args]
        ev = process_map[e]
        tv = process_map[t]
        if ev == tv:
            nv = process_map[n]
            if callable(nv):
                nv(*o)
            else:
                print("func type 20 error")

    def func_21(*args):
        # No-op opcode.
        pass

    def func_23(e: float, t: float, *args):
        # If reg[e] exists, call reg[t] with the literal (non-register) args.
        i = list(args)
        ev = process_map[e]
        tv = process_map[t]
        if ev is not None:
            if callable(tv):
                tv(*i)

    # Opcode table; key 10 is pre-seeded with the "window" root object name.
    process_map.update({
        1: func_1, 2: func_2, 5: func_5, 6: func_6, 24: func_24, 7: func_7,
        17: func_17, 8: func_8, 10: "window", 14: func_14, 15: func_15,
        18: func_18, 19: func_19, 20: func_20, 21: func_21, 23: func_23
    })

    return process_map
222
-
223
# Wall-clock anchor read by get_func_map's performance.now emulation;
# reset by process_turnstile() at the start of each run.
start_time = 0
224
-
225
-
226
def process_turnstile(dx: str, p: str) -> str:
    """Decode and execute a turnstile token program.

    Returns the base64-encoded result the program emits via opcode 3, or ""
    when the payload cannot be decoded. Per-instruction failures are ignored,
    matching the browser script's forgiving behavior.
    """
    global start_time
    start_time = time.time()

    decoded = get_turnstile_token(dx, p)
    if decoded is None:
        return ""

    instructions = json.loads(decoded)
    res = ""
    process_map = get_func_map()

    def func_3(e: str):
        # Opcode 3: emit the final result (base64 of its argument).
        nonlocal res
        res = pybase64.b64encode(e.encode()).decode()

    # Seed the run-specific registers: the emitter, the full program, the key.
    process_map[3] = func_3
    process_map[9] = instructions
    process_map[16] = p

    for instruction in instructions:
        try:
            opcode, operands = instruction[0], instruction[1:]
            handler = process_map.get(opcode)
            if callable(handler):
                handler(*operands)
        except Exception:
            pass

    return res
261
-
262
-
263
- if __name__ == "__main__":
264
- result = process_turnstile(
265
- "PBp5bWF1cHlLe1ttQhRfaTdmXEpidGdEYU5JdGJpR3xfHFVuGHVEY0tZVG18Vh54RWJ5CXpxKXl3SUZ7b2FZAWJaTBl6RGQZURh8BndUcRlQVgoYalAca2QUX24ffQZgdVVbbmBrAH9FV08Rb2oVVgBeQVRrWFp5VGZMYWNyMnoSN0FpaQgFT1l1f3h7c1RtcQUqY1kZbFJ5BQRiZEJXS3RvHGtieh9PaBlHaXhVWnVLRUlKdwsdbUtbKGFaAlN4a0V/emUJe2J2dl9BZkAxZWU/WGocRUBnc3VyT3F4WkJmYSthdBIGf0RwQ2FjAUBnd3ZEelgbVUEIDAJjS1VZbU9sSWFjfk55J2lZFV0HWX1cbVV5dWdAfkFIAVQVbloUXQtYaAR+VXhUF1BZdG4CBHRyK21AG1JaHhBFaBwCWUlocyQGVT4NBzNON2ASFVtXeQRET1kARndjUEBDT2RKeQN7RmJjeVtvZGpDeWJ1EHxafVd+Wk1AbzdLVTpafkd9dWZKeARecGJrS0xcenZIEEJQOmcFa01menFOeVRiSGFZC1JnWUA0SU08QGgeDFFgY34YWXAdZHYaHRhANFRMOV0CZmBfVExTWh9lZlVpSnx6eQURb2poa2RkQVJ0cmF0bwJbQgB6RlRbQHRQaQFKBHtENwVDSWpgHAlbTU1hXEpwdBh2eBlNY3l2UEhnblx7AmpaQ08JDDAzJUVAbn5IA2d8XX5ZFVlrYWhSXWlYQlEdZlQ/QUwuYwJgTG5GZghSRHdCYk1CWWBjclp0aWo3TWMSQmFaaAdge05FbmFhH3hxCFZuIX1BY01WVW5ABx5jfG1ZbjcZEiwwPFYQVm0sdHV8Xnl7alRuemgKZUwICklweW1heHR5Q3UqYVoSR3BCaldIc3Z8SmJOS212CAY5AmMkYmMaRn5UXEthZFsHYFx7ZHRnYV5tcFBZeHocQxUXXU0bYk0VFUZ0ZgFrSWcMRksCAwdJEBBncF12fGUVdnFNQnl4ZQB9WUclYGMRe04TQUZMf0FEbEthW357HEN2aVhAdHAMH0NPdWFicm1YbzNRBSkWMDUAOVdXbBlfRz51ah54YG5iVX9sR2t6RF1pR1RGU20MABBWQy55T3dQfmlUfmFrA35gY2AdDiBWMWVlP1hqHEVAZ3NzfE9/c1pCZWErYXQSB2BKcENjew1baXB9Rm1aG1VBCAkJY01aWW1NbklgZH5Oek1rTX9FFEB7RHNGEG9pKH1eRgFSZGJJdkcMQHUSY0IRQRkzUmFgBG90cklvVwNZThIHQXYABjFJaApCWh1qUEhnWVpiBHxDRDlAHg8kFVcCY1dCUk8VRm9obEN9e21EdnluWxN7eWt8RnFOekRTRXZKXkNPWH40YGMRXHwfRHZ7Z1JKS2R9XG1XR09qCGlaZmZ/QXwnfloWTQxIflxbSVNdSUZgHBRLKCwpQwwmXzB2NFRMOVxUTFNfH3BoRVhfWkcBYghVaSh0ZWMFeG9qBWp5eENNeGNldncHR0wBezVPTjdlSGcOTndjVkAUVl99YQFkRUE2YlNKe3ppeml2V2lvYkhGHjtbNHIALywsMScPEjEFO3Q1MQ0UGDYvK148ETYxIzEcD0gzchNcLSs+LAJxJiEQKBd5MCsXCRclFA0gBRg3axk1HTkBGyoUPRhwCwI2OAIRB2gUBRcjATt6ORQ9JDANOHFlEQITIC8VOS4GAC49GDscBBQMNQ4hDQtQZHYMHmk3BRFHeHZvcXNvd01+WXxPFF9pN2ZaSmR3Z0RkQkl7YmlHbzMsSS8HEy4PPggxGAAYBBcuJREBEQA7LAMANgEiNiZgFR5Mchs0eH83ERFsGCceZTESe2MeEgQSGwgXIgIbb38FFBAWEC1GFC42OQ0CCwcudSIpOwY6MRw7IjwYAgAYD3UbOA8AaHoHPiUkBgQmTA4FUxgAOCoJKxNmVSoANDIzAjdlDxA6ISIOKhQDEhwLPS82IT4CUFIsOyIwL
D4+BBsDAww1AnMqHAIlMiMTGT0oAQlUE3QDQhIUACMxDwhGLxEXHQsSIV0FLgMaAgJ2LgsEHyEPLBcKOBtfUhg9MiAXPT5fHhA1Wg8+BxoPLgYcGS0WRSsELjIZKg8EJw4lFQAoUCcTcxASLS9BOTsZD3ERGRUhOD1YUjJxWBEBdnc9PwkQNytyED0zAQtaG3Y2ACsWXSsoPV4+DBQ2DyQ+bg0MHxVHKhAqNh8QPVkNET5fAis5Jh0uGxACKA8kOyo6IBkHIgkKdx0sAgA8SAQVHCkCLwcoBnQHGRAeAxAXOQAdKxhrNxMLJQYrKwAxHnFcOA4HIlEEAVkVDigqAwMoORQQKFkaOy0pISMoRmYDPyFLCRIqVhwCImITET04Gx8QPTMWWRQDcgstAioLGSkBTjw7ECYLeSgraxFoazw2CQcrJgU1cQ0fAB4YEykpIQMEPgJ0NUY0Lhc8IBEEWQtyNSkeECEmHitRFhsULgUrASkfO3E6XDsqLTAVcg8pFCwUaT8rPiMALzskFQQNJBkfKgUxBwscAj4YWhYHDxoXEBRwHgUUMx4gCxsCGBRJAz5yABsCAxIPFSo2AQILLSs7NS4EAGEnFBANJBgTOV0FLWJSKAUQeRkDKyAjCjYqIwEUBwAUPT5iBgohDzYmBAEBJS4pCSspGgUQBDsuD3wvKFd7HwE/EQ8ZFQgRICYEAgUuRhovHFYdM15eNwIgZBgmBVIoJGBnACRXChIKQR8lDVh2CicfKTIBcxwzNionIg4PEVI0FyMQOTkaABI3JSoAByVTKAItJn1ULjcEOG4gBjoqDnAQDjsGHzA2cF92CTIlAhMdchoJABA6KQEyajcgBAM+IhwyE292OTQ0IzUsAVY8EBcxMRxoKgEhBRQSGTMLfQsgFDp1PDQsCgEFKAkIASA8EhF4IgpjIzMJJC4WcyYcEQkPPSMBHlUSfFkuPCQnKiMaAGYWEC80EQIeex9wJjszCSQMFg4iDDcvVxMEBR17Knw0OnMVRyc4fj9ROQpiABoWFxAscR0Na3gBHWdyPjcOBCMleBQgKR4rLQViBhcLGnEgDDZ4ACoPJhQQIH4nHBoDNhkWCyUWDRgVFx4YAwAzFjAELCUPNScjDQ4hDB54Gwg4K2g3BmMBKjkwGggiFAo0Iwp6BBQeDxYwBz4VKCIzeDQmJjYeXTUmHCZpcygrAQt3NAFrBjsmGhtWJz8uUiR3CjorPy4NJXUuOjYIBDoMDGM4MwxxNiMNGg4SES01GHA1O3EIOSo7LQUXHnEeOgIjPXENLjQSfn4OVSkSAgcFBQIxDQUuajUPOj0MFwwcZhMnVzQOCQMDAWBWZBUPPx4oBAA5YA5qBwcrEwQ+IjppEz47Ji4CE2YNKTEzAUcjBgAoFFwyKHwbCz8pARUrDgIIMgg1H2MXGTUBFx0XAgMdEj0HOQ4MIionOyE2cUcxHAA7Iw0sNTkBDUU9GRsbPgkzOBwNKD9hHBdVJipxVTYRAgMmGAIVKxc2JREoNxgtMysDHggNExYWBh8FHwUfBQ8/KQYONiUrLjkfIwpxHDgYCTw1MDEMMBU2JRErK2crDzZdCy94UjAOC00MMgFCKTJxZw8mdgoSCzQMcAtzDC8hMBw7CHJ/GjQ+Cw4aDAVyMTMwEi8gHhUfNB8sDi4hWTQ0GDdJdSEVNggXAhY7Knd3MQ4KGhoZDm11DysqLxI8NXYZCXMDMngaMQg5PSsYKjYxJRJzdx8jOzQlIwklEwgtDhEMdwskLAs3Izg7LQscJi4IeyE3GiAbDAYrHzEzEjcxKicAdSteCTMqJHsUMSEXMT0kJD4Ga3V2Kk4rMSUZHS8qMAsqHTsEPR8RXzArXzc2OgYQOy4oPXc1AQM+DhpuMDFRFTMrBn8pCQkCdCE/MDILKG8uGllRNRlGRy0NGjsyFGoTKSUsOiwkAi8sNRJUNgQ0czEuFgUNMShjBAsBDDErbywzKBoKKzkeOncPDR42HCskNGg7B
jEMVgAvOyApLQ5WPgAVHiM+Jz8eOA8BOSI7Xwo4JGIJNjYdCz0MFmAuPhEbLzc3VjUQAGwoHjATcSAGdwUVCjIqMDA1OyQNUB5gGRw6UwpkNS0eECoqbCt2KzQEdD1jBzEZOxQdIjBoMxVqCyoEBToSDB5xPz44LA9MCDAKMAZhLgZZACwMKAYDPWgHODIGHiwMIDUpZ2YEMA04By8INQl3ClQLLC8wCDIIXG8/PSARMDYQLxQyeh8qFTg7MhhUDzkLKwNzDT8RPQ84JC0dDTAqGDA7KxkoKDAcPzh1KQo9LzkeN3YMIxc4HzsBNxorAj0jQX90CCMlPQ4FMTYPfDgwDA0sMyoJHyw6EigMCwULUBsDcnsAdQUAKRAMFBIqLQwCGCkLLmoOJQIEOSU/JQ0JFQgmDx02LwgrIjMLHQQ9DCw+cgoRJREWZAQkCyoyNgskJip0JDg5cy1BXXIzJAl3GCQCdggwZXEbBmcPNAwwCAV9fAkGDDUUBhBmKTgyKAo0KRklcRc/IxY5KQ8SACIKEgg4FVUuDx0FUVoiK3IuEiQEGQkkYToJDhcPJhVTfA8zMiMhFgxnAystCycgLTweB1A0GAMuACIBVEUKHSYiCR0UJA0ENQsRBwUPCgEpMCcvGyUKdxcvH3U5OAwRegMnCiE1IxYiOgsGEGoOAhg/DxJ9IggHCzESCgMsJgJ9awodFDksDRAyCyA1NwodDCwJOFcWCw0yNwokfTUKLwt3IwolIwwocTcbRRAeCwoMHiUZOWkeCRclHihWMyVVcTcfVQEkJjAyMyReOT0jEFwMC1UPPyMwATQnO1oxHz8DNSIoAScYMBMtDi8iFgwgHwwKMAxnDjsXDQooCx4YHSY4JQYYPgQ0Cz0PVkQEEQYqKCIWPTELLBsxElgUMBcENhMKPQQRbyQVRhJdREdUW0tUYB4MX2BjeAU8bxEfZUVYW1VHTF5OSQV/f1xBMU5Jamd7QX9fbWd4H3p1ZhNuYmRFVHRyZHRnBltCCnxGV1YxeEQcDUp3ZlJAFFhafWEKFUlQQ25cOW9iHm90Yk5teXpaSGdhXHsBYStPTR1fdG5wHUIAZ0ZuZWVTeFQVWWliaFxSGFRQOARhQlRVQFVpBmBObEZmAUlKdU9gW0VFbHJkXW0Ffko6cmVTfEx3CXdvV1x+eWMDE2h1IXlJZ0J1VkNKe1cGBnZkcE1gdFJbbXdsWntMECo=",
266
- "gAAAAACWzMwMzIsIlRodSBKdWwgMTEgMjAyNCAwMzoxMDo0NiBHTVQrMDgwMCAo5Lit5Zu95qCH5YeG5pe26Ze0KSIsNDI5NDcwNTE1MiwxLCJNb3ppbGxhLzUuMCAoV2luZG93cyBOVCAxMC4wOyBXaW42NDsgeDY0KSBBcHBsZVdlYktpdC81MzcuMzYgKEtIVE1MLCBsaWtlIEdlY2tvKSBDaHJvbWUvMTI2LjAuMC4wIFNhZmFyaS81MzcuMzYgRWRnLzEyNi4wLjAuMCIsImh0dHBzOi8vY2RuLm9haXN0YXRpYy5jb20vX25leHQvc3RhdGljL2NodW5rcy9wYWdlcy9fYXBwLWMwOWZmNWY0MjQwMjcwZjguanMiLCJjL1pGWGkxeTNpMnpaS0EzSVQwNzRzMy9fIiwiemgtQ04iLCJ6aC1DTixlbixlbi1HQixlbi1VUyIsMTM1LCJ3ZWJraXRUZW1wb3JhcnlTdG9yYWdl4oiSW29iamVjdCBEZXByZWNhdGVkU3RvcmFnZVF1b3RhXSIsIl9yZWFjdExpc3RlbmluZ3NxZjF0ejFzNmsiLCJmZXRjaCIsMzY1NCwiNWU1NDUzNzItMzcyNy00ZDAyLTkwMDYtMzMwMDRjMWJmYTQ2Il0="
267
- )
268
- print(result)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
chatgpt/wssClient.py DELETED
@@ -1,36 +0,0 @@
1
- import json
2
- import time
3
-
4
- from utils.Logger import logger
5
- import chatgpt.globals as globals
6
-
7
-
8
def save_wss_map(wss_map):
    """Persist the token -> wss endpoint cache to disk."""
    payload = json.dumps(wss_map)
    with open(globals.WSS_MAP_FILE, "w") as fp:
        fp.write(payload)
11
-
12
-
13
async def token2wss(token):
    """Look up the cached wss mode/url for a token.

    Returns (wss_mode, wss_url); the url is None when the token is unknown,
    wss is disabled for it, or the cached entry is older than one hour.
    """
    if not token:
        return False, None
    entry = globals.wss_map.get(token)
    if not entry:
        return False, None
    wss_mode = entry["wss_mode"]
    if not wss_mode:
        return False, None
    age = int(time.time()) - entry.get("timestamp", 0)
    if age < 60 * 60:
        logger.info(f"token -> wss_url from cache")
        return wss_mode, entry["wss_url"]
    logger.info(f"token -> wss_url expired")
    return wss_mode, None
29
-
30
-
31
async def set_wss(token, wss_mode, wss_url=None):
    """Record the wss endpoint for a token and persist the map.

    A falsy token is a no-op. Always returns True.
    """
    if token:
        globals.wss_map[token] = {"timestamp": int(time.time()), "wss_url": wss_url, "wss_mode": wss_mode}
        save_wss_map(globals.wss_map)
    return True