TS447 committed on
Commit
81ae84e
·
verified ·
1 Parent(s): 16a1744

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +33 -25
app.py CHANGED
@@ -19,13 +19,16 @@ HF_TOKEN = os.environ.get("HF_TOKEN")
19
  TG_BOT_TOKEN = os.environ.get("TELEGRAM_BOT_TOKEN")
20
  TG_CHAT_ID = os.environ.get("TELEGRAM_CHAT_ID")
21
 
22
- # MODELS
23
  TEXT_MODEL = "Qwen/Qwen2.5-Coder-32B-Instruct"
24
- VISION_MODEL = "Qwen/Qwen2-VL-7B-Instruct" # Sabse fast vision model
 
 
 
25
 
26
  @app.route('/', methods=['GET'])
27
  def home():
28
- return jsonify({"status": "TS AI Brain (Final Debug Mode) is Active! 🔥"})
29
 
30
  @app.route('/api/chat', methods=['POST'])
31
  def chat():
@@ -34,10 +37,10 @@ def chat():
34
  image_url = None
35
 
36
  if is_form_data or request.files:
37
- user_message = request.form.get("message", "Is photo ko read karo.")
38
  image_file = request.files.get("image")
39
  if image_file:
40
- # 1. TELEGRAM UPLOAD
41
  tg_url = f"https://api.telegram.org/bot{TG_BOT_TOKEN}/sendPhoto"
42
  files = {'photo': (image_file.filename, image_file.read(), image_file.mimetype)}
43
  data = {'chat_id': TG_CHAT_ID}
@@ -49,6 +52,7 @@ def chat():
49
  image_url = f"https://api.telegram.org/file/bot{TG_BOT_TOKEN}/{file_info['result']['file_path']}"
50
  except Exception as e:
51
  return jsonify({"error": "Telegram upload fail", "details": str(e)}), 500
 
52
  else:
53
  data = request.json
54
  user_message = data.get("message", "")
@@ -58,46 +62,50 @@ def chat():
58
  doc = doc_ref.get()
59
  history = doc.to_dict().get('messages', []) if doc.exists else [{"role": "system", "content": "Tum TS Boss ke AI ho. Hinglish mein baat karo."}]
60
 
61
- headers = {"Authorization": f"Bearer {HF_TOKEN}", "Content-Type": "application/json"}
 
 
 
 
62
 
63
  try:
64
  if image_url:
65
- # 2. VISION API CALL (Direct Model Path)
66
  payload = {
67
  "model": VISION_MODEL,
68
- "messages": [{"role": "user", "content": [{"type": "text", "text": user_message}, {"type": "image_url", "image_url": {"url": image_url}}]}],
 
 
 
 
 
 
 
 
69
  "max_tokens": 1000
70
  }
71
- # Direct Inference URL for Vision
72
- V_URL = f"https://api-inference.huggingface.co/models/{VISION_MODEL}/v1/chat/completions"
73
- resp = requests.post(V_URL, headers=headers, json=payload)
74
-
75
- # Agar model load ho raha ho (503), toh thoda wait karke dobara try karein
76
- if resp.status_code == 503:
77
- time.sleep(5)
78
- resp = requests.post(V_URL, headers=headers, json=payload)
79
-
80
  result = resp.json()
 
81
  if "choices" in result:
82
- reply = result["choices"][0]["message"]["content"].strip()
83
  else:
84
- return jsonify({"error": "Vision Model ne sahi jawab nahi diya", "api_response": result}), 500
85
 
86
- history.append({"role": "user", "content": f"[Screenshot Sent] {user_message}"})
87
  else:
88
- # 3. TEXT API CALL
89
  history.append({"role": "user", "content": user_message})
90
  payload = {"model": TEXT_MODEL, "messages": history, "max_tokens": 1000}
91
- T_URL = "https://router.huggingface.co/v1/chat/completions"
92
- resp = requests.post(T_URL, headers=headers, json=payload)
93
- reply = resp.json()["choices"]["message"]["content"].strip()
94
 
95
  history.append({"role": "assistant", "content": reply})
96
  doc_ref.set({'messages': history})
97
  return jsonify({"reply": reply})
98
 
99
  except Exception as e:
100
- return jsonify({"error": "System Crash!", "details": str(e)}), 500
101
 
102
  if __name__ == '__main__':
103
  app.run(host='0.0.0.0', port=7860)
 
19
# Telegram bot credentials, read from the environment; the bot channel is
# used as free image storage for uploaded screenshots.
TG_BOT_TOKEN = os.environ.get("TELEGRAM_BOT_TOKEN")
TG_CHAT_ID = os.environ.get("TELEGRAM_CHAT_ID")

# Models served via the Hugging Face router (must be router-compatible).
TEXT_MODEL = "Qwen/Qwen2.5-Coder-32B-Instruct"
VISION_MODEL = "meta-llama/Llama-3.2-11B-Vision-Instruct"

# Single OpenAI-compatible chat-completions endpoint used for both
# text and vision calls.
ROUTER_URL = "https://router.huggingface.co/v1/chat/completions"
28
 
29
@app.route('/', methods=['GET'])
def home():
    """Health-check endpoint: confirms the service is up and in router mode."""
    status_payload = {"status": "TS AI Brain (Official Router Mode) is Active! 🔥"}
    return jsonify(status_payload)
32
 
33
  @app.route('/api/chat', methods=['POST'])
34
  def chat():
 
37
  image_url = None
38
 
39
  if is_form_data or request.files:
40
+ user_message = request.form.get("message", "Explain this image.")
41
  image_file = request.files.get("image")
42
  if image_file:
43
+ # 1. TELEGRAM UPLOAD (STORAGE)
44
  tg_url = f"https://api.telegram.org/bot{TG_BOT_TOKEN}/sendPhoto"
45
  files = {'photo': (image_file.filename, image_file.read(), image_file.mimetype)}
46
  data = {'chat_id': TG_CHAT_ID}
 
52
  image_url = f"https://api.telegram.org/file/bot{TG_BOT_TOKEN}/{file_info['result']['file_path']}"
53
  except Exception as e:
54
  return jsonify({"error": "Telegram upload fail", "details": str(e)}), 500
55
+
56
  else:
57
  data = request.json
58
  user_message = data.get("message", "")
 
62
  doc = doc_ref.get()
63
  history = doc.to_dict().get('messages', []) if doc.exists else [{"role": "system", "content": "Tum TS Boss ke AI ho. Hinglish mein baat karo."}]
64
 
65
+ headers = {
66
+ "Authorization": f"Bearer {HF_TOKEN}",
67
+ "Content-Type": "application/json",
68
+ "x-wait-for-model": "true" # Ye HF ko bolta hai ki model load hone tak ruko!
69
+ }
70
 
71
  try:
72
  if image_url:
73
+ # --- VISION CALL VIA ROUTER ---
74
  payload = {
75
  "model": VISION_MODEL,
76
+ "messages": [
77
+ {
78
+ "role": "user",
79
+ "content": [
80
+ {"type": "text", "text": user_message},
81
+ {"type": "image_url", "image_url": {"url": image_url}}
82
+ ]
83
+ }
84
+ ],
85
  "max_tokens": 1000
86
  }
87
+ resp = requests.post(ROUTER_URL, headers=headers, json=payload)
 
 
 
 
 
 
 
 
88
  result = resp.json()
89
+
90
  if "choices" in result:
91
+ reply = result["choices"]["message"]["content"]
92
  else:
93
+ return jsonify({"error": "Router Vision Fail", "api_response": result}), 500
94
 
95
+ history.append({"role": "user", "content": f"[Screenshot] {user_message}"})
96
  else:
97
+ # --- TEXT CALL VIA ROUTER ---
98
  history.append({"role": "user", "content": user_message})
99
  payload = {"model": TEXT_MODEL, "messages": history, "max_tokens": 1000}
100
+ resp = requests.post(ROUTER_URL, headers=headers, json=payload)
101
+ reply = resp.json()["choices"][0]["message"]["content"]
 
102
 
103
  history.append({"role": "assistant", "content": reply})
104
  doc_ref.set({'messages': history})
105
  return jsonify({"reply": reply})
106
 
107
  except Exception as e:
108
+ return jsonify({"error": "System Crash", "details": str(e)}), 500
109
 
110
  if __name__ == '__main__':
111
  app.run(host='0.0.0.0', port=7860)