Shirpi committed on
Commit
b877f0b
Β·
verified Β·
1 Parent(s): 1f29ed1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +60 -46
app.py CHANGED
@@ -4,15 +4,19 @@ import time
4
  import json
5
  import markdown
6
  from flask import Flask, request, jsonify, render_template_string
7
- import google.generativeai as genai # πŸ‘ˆ CLASSIC LIBRARY (STABLE)
8
 
9
# ==========================================
# 👇 API KEYS SETUP 👇
# ==========================================
# Raw secret value; may hold several keys separated by commas,
# spaces, or newlines (e.g. pasted from a dashboard).
keys_string = os.environ.get("API_KEYS", "")
# Normalize all separators to spaces, then split and drop empties.
API_KEYS = [k.strip() for k in keys_string.replace(',', ' ').replace('\n', ' ').split() if k.strip()]

print(f"✅ Loaded {len(API_KEYS)} API Keys.") # Debug Log

# Index of the key currently in use; rotated on failure by the retry logic.
current_key_index = 0
app = Flask(__name__)
@@ -45,67 +49,77 @@ PERMANENT RULES:
45
  4. **BRIEF:** Keep explanations short and crisp.
46
  """
47
 
 
 
 
 
 
 
48
def generate_with_retry(prompt, history_messages=None):
    """
    Send *prompt* to gemini-1.5-flash via the classic google-generativeai SDK,
    rotating through API_KEYS on failure.

    Args:
        prompt: The user message to send to the model.
        history_messages: Optional list of {"role": ..., "content": ...} dicts;
            only the most recent 10 are forwarded as chat history.

    Returns:
        The model's response text, or a user-facing error string if the keys
        are missing or every retry attempt failed.
    """
    global current_key_index

    if not API_KEYS:
        return "❌ Error: API Keys Missing in Secrets."

    # Avoid the mutable-default-argument pitfall: default to a fresh list.
    if history_messages is None:
        history_messages = []

    # Convert stored history to the classic SDK format:
    # [{'role': 'user'|'model', 'parts': ['text']}, ...].
    # Keep only the last 10 messages to bound request size.
    recent_history = history_messages[-10:]
    formatted_history = []
    for msg in recent_history:
        role = "user" if msg["role"] == "user" else "model"
        formatted_history.append({
            "role": role,
            "parts": [msg["content"]]
        })

    last_error = ""

    # RETRY LOOP: give each key up to two chances overall.
    for _ in range(len(API_KEYS) * 2):
        key = API_KEYS[current_key_index]

        try:
            # 1. Configure the SDK with the current key.
            genai.configure(api_key=key)

            # 2. Initialize the model (classic API).
            model = genai.GenerativeModel(
                model_name="gemini-1.5-flash",
                system_instruction=SYSTEM_INSTRUCTION
            )

            # 3. Start the chat and send the message.
            chat = model.start_chat(history=formatted_history)
            response = chat.send_message(prompt)

            return response.text

        except Exception as e:
            # NOTE: a 404 here would indicate a key-access issue rather than a
            # bad model name (gemini-1.5-flash is valid); either way we just
            # rotate to the next key below.
            error_msg = str(e)
            print(f"⚠️ Key #{current_key_index+1} Failed: {error_msg}")

            last_error = error_msg
            current_key_index = (current_key_index + 1) % len(API_KEYS)
            time.sleep(1)  # brief cool-down before retrying

    return f"⚠️ **Connection Error:** {last_error}<br>Try restarting the Space."
107
 
108
- # --- UI TEMPLATE (KEEPING THE WORKING UI) ---
109
  HTML_TEMPLATE = """
110
  <!DOCTYPE html>
111
  <html lang="en">
@@ -314,7 +328,7 @@ def chat():
314
  try:
315
  previous_messages = user_db[user][cid]['messages'][:-1]
316
 
317
- # Call the classic generation function
318
  raw_response = generate_with_retry(msg, previous_messages)
319
 
320
  formatted_html = markdown.markdown(raw_response, extensions=['fenced_code', 'codehilite'])
 
4
  import json
5
  import markdown
6
  from flask import Flask, request, jsonify, render_template_string
7
+ import google.generativeai as genai
8
 
9
# ==========================================
# 👇 API KEYS SETUP 👇
# ==========================================
# Raw secret value; may hold several keys separated by commas,
# spaces, or newlines.
keys_string = os.environ.get("API_KEYS", "")
# Clean up keys (remove spaces/newlines)
API_KEYS = [k.strip() for k in keys_string.replace(',', ' ').replace('\n', ' ').split() if k.strip()]

# 👇 MODELS TO TRY (Priority Order) 👇
# The retry logic falls back to the next entry when a model returns 404.
MODELS = ["gemini-1.5-flash", "gemini-1.5-pro", "gemini-pro"]

print(f"✅ Loaded {len(API_KEYS)} API Keys.")

# Index of the key currently in use; rotated on failure via get_next_key().
current_key_index = 0
app = Flask(__name__)
 
49
  4. **BRIEF:** Keep explanations short and crisp.
50
  """
51
 
52
def get_next_key():
    """Advance the rotating key index and return the newly selected API key."""
    global current_key_index
    total_keys = len(API_KEYS)
    current_key_index = (current_key_index + 1) % total_keys
    return API_KEYS[current_key_index]
57
+
58
def generate_with_retry(prompt, history_messages=None):
    """
    Generate a chat response with smart key/model failover.

    SMART LOGIC:
    - 429 / quota exhausted  -> switch KEY immediately (with a short cool-down).
    - 404 / model not found  -> switch MODEL immediately (same key).
    - any other error        -> switch KEY.

    Args:
        prompt: The user message to send to the model.
        history_messages: Optional list of {"role": ..., "content": ...} dicts;
            only the most recent 10 are forwarded as chat history.

    Returns:
        The model's response text, or a user-facing error string if the keys
        are missing or every key/model combination failed.
    """
    global current_key_index

    if not API_KEYS: return "❌ Error: API Keys Missing in Secrets."

    # Avoid the mutable-default-argument pitfall: default to a fresh list.
    if history_messages is None:
        history_messages = []

    # History Formatting: classic SDK expects
    # [{'role': 'user'|'model', 'parts': ['text']}, ...]; keep last 10 only.
    recent_history = history_messages[-10:]
    formatted_history = []
    for msg in recent_history:
        role = "user" if msg["role"] == "user" else "model"
        formatted_history.append({"role": role, "parts": [msg["content"]]})

    last_error = ""

    # We allow (Number of Keys * Number of Models) attempts total.
    max_attempts = len(API_KEYS) * len(MODELS)

    for _ in range(max_attempts):
        key = API_KEYS[current_key_index]

        # Try the current key with all models, in priority order.
        for model_name in MODELS:
            try:
                genai.configure(api_key=key)
                model = genai.GenerativeModel(
                    model_name=model_name,
                    system_instruction=SYSTEM_INSTRUCTION
                )

                chat = model.start_chat(history=formatted_history)
                response = chat.send_message(prompt)

                # If success, return text.
                return response.text

            except Exception as e:
                error_str = str(e).lower()
                # Always record the error so the final message reports the
                # most recent failure (previously only CASE 3 set this,
                # leaving a stale/empty value after 429/404 failures).
                last_error = error_str

                # CASE 1: QUOTA EXCEEDED (429) -> SWITCH KEY
                if "429" in error_str or "exhausted" in error_str:
                    print(f"⚠️ Key #{current_key_index+1} Quota Full ({model_name}). Switching Key...")
                    get_next_key()  # Rotate Key
                    time.sleep(1)   # Cool down
                    break           # Break inner model loop, try new key

                # CASE 2: MODEL NOT FOUND (404) -> TRY NEXT MODEL (SAME KEY)
                elif "404" in error_str or "not found" in error_str:
                    print(f"⚠️ {model_name} not found. Trying next model...")
                    continue  # Try next model in the list

                # CASE 3: OTHER ERRORS -> SWITCH KEY
                else:
                    print(f"⚠️ Error on {model_name}: {error_str}")
                    get_next_key()
                    break

    return f"⚠️ **System Busy:** All keys/models failed. (Last Error: {last_error})"
121
 
122
+ # --- UI TEMPLATE (STABLE - NO CHANGES) ---
123
  HTML_TEMPLATE = """
124
  <!DOCTYPE html>
125
  <html lang="en">
 
328
  try:
329
  previous_messages = user_db[user][cid]['messages'][:-1]
330
 
331
+ # Call the SMART generation function
332
  raw_response = generate_with_retry(msg, previous_messages)
333
 
334
  formatted_html = markdown.markdown(raw_response, extensions=['fenced_code', 'codehilite'])