shiv-4567892009 committed on
Commit
4a2dcef
·
verified ·
1 Parent(s): 24ae7bf

Created app.py

Browse files
Files changed (1) hide show
  1. app.py +172 -0
app.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import json
3
+ import uuid
4
+ import requests
5
+ import time
6
+ from flask import Flask, request, Response, jsonify
7
+
8
app = Flask(__name__)

# ==============================================================================
# CONFIG
# ==============================================================================
# Session cookie for cognixai.co. The COOKIE_VALUE environment variable (add it
# as a secret in the Space settings) takes precedence; the hardcoded value from
# hi.py is only a fallback and will stop working once that session expires.
RAW_COOKIE = os.environ.get(
    "COOKIE_VALUE",
    "cf_clearance=OKEc8I4zkAdc_cYAx8zSVCOSuOSV2_kVsqBE7aaIIeM-1770044471-1.2.1.1-gaa4G052d_XBWdr.OJMsLGakekbPev1RiqMPA0vukct0SIspf0SInwXuY3yJeadggphdWtqYgRodnKKg9WfzOJh2EUtNY53fnnkHhs5ntywLMuk73FZTEI5NMim_FxjJ2xQXXRFxOAuacHCESyTeC96oOr_FrTdu0gZ0hJX3z5Wurw2h2KGEMqsAq737XtG2G6qUYckk10v4uWxLyFmEZHIiAajtwdh6F.ImIFJN_.Y; __Secure-better-auth.state=M-xPebcuX_QNYD573nAaocuMydAVHN5i.vRMOdmB9Lyb6Pyw2D7NejhtM%2FuWyWoNLGRnJv3Zou8k%3D; __Secure-better-auth.session_token=JQfRDZNrxwqaQ5kImpLqwm16hBFLPaQV.IUrxupoeprZ1fq7HftehR%2FybojAB%2FzcY%2BjT7KAiKKw8%3D; __Secure-better-auth.session_data=eyJzZXNzaW9uIjp7InNlc3Npb24iOnsiZXhwaXJlc0F0IjoiMjAyNi0wMi0wOVQxNTowMTozMS45MDBaIiwidG9rZW4iOiJKUWZSRFpOcnh3cWFRNWtJbXBMcXdtMTZoQkZMUGFRViIsImNyZWF0ZWRBdCI6IjIwMjYtMDItMDJUMTU6MDE6MzEuOTAwWiIsInVwZGF0ZWRBdCI6IjIwMjYtMDItMDJUMTU6MDE6MzEuOTAwWiIsImlwQWRkcmVzcyI6IjEwNC4yMy4xOTAuMTQ4IiwidXNlckFnZW50IjoiTW96aWxsYS81LjAgKEFuZHJvaWQgMTQ7IE1vYmlsZTsgcnY6MTQ3LjApIEdlY2tvLzE0Ny4wIEZpcmVmb3gvMTQ3LjAiLCJ1c2VySWQiOiIyZTRjYmVlYS03ODQ2LTQwYTktYmI0Yy1kNzQ3MWEzMWJiMTgiLCJpbXBlcnNvbmF0ZWRCeSI6bnVsbCwiaWQiOiJkMGU4OTZmZi04ZDhmLTQ4Y2MtOTIwZS05ZjBjYjc4OWM4NDUifSwidXNlciI6eyJuYW1lIjoiQnJ1ayBHZXRhY2hldyIsImVtYWlsIjoiYnJ1a2c5NDE5QGdtYWlsLmNvbSIsImVtYWlsVmVyaWZpZWQiOnRydWUsImltYWdlIjoiaHR0cHM6Ly9saDMuZ29vZ2xldXNlcmNvbnRlbnQuY29tL2EvQUNnOG9jSjBHeWpPWFVmSXVpVTIzeTdGcE53WExKcHdQNHRQTV95R3RhVkp2aU9HTjBITW1RPXM5Ni1jIiwiY3JlYXRlZEF0IjoiMjAyNi0wMi0wMlQxNTowMTozMS44NDZaIiwidXBkYXRlZEF0IjoiMjAyNi0wMi0wMlQxNTowMTozMS44NDZaIiwicm9sZSI6ImVkaXRvciIsImJhbm5lZCI6ZmFsc2UsImJhblJlYXNvbiI6bnVsbCwiYmFuRXhwaXJlcyI6bnVsbCwiaWQiOiIyZTRjYmVlYS03ODQ2LTQwYTktYmI0Yy1kNzQ3MWEzMWJiMTgifX0sImV4cGlyZXNBdCI6MTc3MDA1Mzg4MjkzNiwic2lnbmF0dXJlIjoiZFFTQkVZbmFlWTV2MGM1ZE8zbURWU1RSRlVDWURrMlFYN1J0bFItdVpObyJ9"
)

# Browser-like headers so the upstream accepts the request as if it came from
# the cognixai.co web client itself (same-origin CORS headers included).
HEADERS = {
    "Accept": "*/*",
    "Accept-Encoding": "gzip, deflate, br, zstd",
    "Accept-Language": "en-ER,am-ER;q=0.9",
    "Connection": "keep-alive",
    "Content-Type": "application/json",
    "Origin": "https://www.cognixai.co",
    "Referer": "https://www.cognixai.co/chat",
    "Sec-Fetch-Dest": "empty",
    "Sec-Fetch-Mode": "cors",
    "Sec-Fetch-Site": "same-origin",
    "User-Agent": "Mozilla/5.0 (Android 14; Mobile; rv:147.0) Gecko/147.0 Firefox/147.0"
}
30
+
31
def parse_cookies(cookie_string):
    """Turn a raw ``Cookie`` header string into a name -> value dict.

    Fragments without an ``=`` are skipped; a value may itself contain ``=``
    (only the first one separates name from value).
    """
    jar = {}
    for fragment in cookie_string.split(';'):
        if '=' not in fragment:
            continue
        name, _, value = fragment.strip().partition('=')
        jar[name] = value
    return jar
41
# Module-level cookie jar sent with every upstream request.
COOKIES = parse_cookies(RAW_COOKIE)
43
+
44
+ # ==============================================================================
45
+ # LOGIC
46
+ # ==============================================================================
47
+
48
def generate_openai_chunk(content, model, finish_reason=None):
    """Build one OpenAI-compatible ``chat.completion.chunk`` dict.

    Args:
        content: Text delta to emit; falsy values produce an empty delta.
        model: Model name echoed back in the chunk.
        finish_reason: ``None`` mid-stream, e.g. ``"stop"`` on the final chunk.
    """
    delta = {"content": content} if content else {}
    choice = {
        "index": 0,
        "delta": delta,
        "finish_reason": finish_reason,
    }
    return {
        "id": "chatcmpl-" + uuid.uuid4().hex[:24],
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": model,
        "choices": [choice],
    }
64
+
65
@app.route('/v1/chat/completions', methods=['POST'])
def chat_completions():
    """OpenAI-compatible chat completions endpoint proxied to cognixai.co.

    Accepts the standard OpenAI request body (``messages``, ``model``,
    ``stream``), forwards the latest user message to the Cognix chat API and
    translates its SSE events back into OpenAI chunk format.

    Returns:
        A ``text/event-stream`` response when ``stream`` is true, otherwise a
        single aggregated ``chat.completion`` JSON object; 400 when no user
        message is present, 500 on unexpected failure.
    """
    try:
        # Tolerate a missing or non-JSON body instead of raising here
        # (request.json would raise and surface as an opaque 500).
        data = request.get_json(silent=True) or {}
        messages = data.get('messages', [])
        model = data.get('model', 'Gemini 3 Pro Thikning')  # Default from hi.py (upstream model id, typo included)
        stream = data.get('stream', False)

        # 1. Extract User Prompt: the newest message with role "user".
        #    OpenAI content may be a plain string or a list of typed parts.
        #    .get() guards against malformed message dicts (no KeyError).
        last_user_content = ""
        for msg in reversed(messages):
            if msg.get('role') == 'user':
                content = msg.get('content', '')
                if isinstance(content, list):
                    last_user_content = " ".join(
                        p.get('text', '') for p in content if p.get('type') == 'text'
                    )
                else:
                    last_user_content = str(content)
                break

        if not last_user_content:
            return jsonify({"error": "No user message found"}), 400

        # 2. Build Payload for Cognix (shape taken from hi.py).
        payload = {
            "id": str(uuid.uuid4()),
            "chatModel": {
                "provider": "google",
                "model": model
            },
            "toolChoice": "auto",
            "allowedAppDefaultToolkit": ["code", "visualization"],
            "message": {
                "role": "user",
                "parts": [{"type": "text", "text": last_user_content}],
                "id": str(uuid.uuid4())
            },
            "imageTool": {},
            "attachments": []
        }

        # 3. Stream Response: translate Cognix SSE events into OpenAI chunks.
        def generate():
            url = "https://www.cognixai.co/api/chat"
            print(f"Sending to Cognix: {last_user_content[:50]}...")

            with requests.post(url, headers=HEADERS, cookies=COOKIES, json=payload, stream=True) as response:
                if response.status_code != 200:
                    yield f"data: {json.dumps({'error': f'Upstream Error: {response.status_code}'})}\n\n"
                    return

                for line in response.iter_lines():
                    if not line:
                        continue
                    decoded = line.decode('utf-8')
                    if not decoded.startswith("data: "):
                        continue
                    try:
                        json_data = json.loads(decoded[6:])
                        delta_content = None

                        # Logic from hi.py: reasoning and text deltas are both
                        # forwarded verbatim; upstream errors are only logged.
                        event_type = json_data.get("type")
                        if event_type in ("reasoning-delta", "text-delta"):
                            delta_content = json_data.get("delta", "")
                        elif event_type == "error":
                            print(f"Stream Error: {json_data}")

                        if delta_content:
                            chunk = generate_openai_chunk(delta_content, model)
                            yield f"data: {json.dumps(chunk)}\n\n"

                    except (json.JSONDecodeError, AttributeError, TypeError) as e:
                        # Skip a malformed event instead of killing the stream.
                        print(f"Parse error: {e}")
                        continue

            # End of stream: terminal chunk plus the SSE sentinel.
            final_chunk = generate_openai_chunk(None, model, "stop")
            yield f"data: {json.dumps(final_chunk)}\n\n"
            yield "data: [DONE]\n\n"

        if stream:
            return Response(generate(), mimetype='text/event-stream')

        # Non-streaming fallback: drain the generator and join the deltas.
        full_response = ""
        for chunk_str in generate():
            if chunk_str.startswith("data: [DONE]"):
                break
            if chunk_str.startswith("data: "):
                try:
                    chunk = json.loads(chunk_str[6:])
                    if 'choices' in chunk:
                        full_response += chunk['choices'][0]['delta'].get('content', '')
                except (json.JSONDecodeError, KeyError, IndexError, TypeError):
                    # Ignore non-chunk payloads (e.g. the upstream error JSON);
                    # narrowed from a bare `except: pass`.
                    pass

        return jsonify({
            "id": str(uuid.uuid4()),
            "object": "chat.completion",
            "created": int(time.time()),
            "model": model,
            "choices": [{"index": 0, "message": {"role": "assistant", "content": full_response}, "finish_reason": "stop"}]
        })

    except Exception as e:
        # Top-level boundary: log and surface unexpected failures as a 500.
        print(f"Error: {e}")
        return jsonify({"error": str(e)}), 500
170
+
171
if __name__ == '__main__':
    # Bind to all interfaces; 7860 is the Hugging Face Spaces default port,
    # overridable via the PORT environment variable.
    app.run(host='0.0.0.0', port=int(os.environ.get("PORT", 7860)))