khirodsahoo93 committed on
Commit
736bdb9
·
verified ·
1 Parent(s): 769b2c0

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -2
app.py CHANGED
@@ -150,12 +150,20 @@ def stream_huggingface(python, model_name):
150
  # Get HF token (optional - works without it but with rate limits)
151
  hf_token = os.environ.get("HF_TOKEN", "")
152
 
 
 
 
 
 
 
153
  # Get the model ID
154
  model_id = HF_MODELS.get(model_name)
155
  if not model_id:
156
  yield f"❌ Unknown model: {model_name}"
157
  return
158
 
 
 
159
  headers = {"Authorization": f"Bearer {hf_token}"} if hf_token else {}
160
 
161
  # Prepare the prompt
@@ -233,15 +241,22 @@ def stream_huggingface(python, model_name):
233
  yield f"❌ Error from Hugging Face (HTTP {response.status_code}):\n{error_msg}\n\n"
234
  yield f"πŸ’‘ Tip: Use GPT-4o or Claude-3.5-Sonnet for now (they're working!)"
235
 
236
- except requests.exceptions.Timeout:
237
- yield f"⏱️ Request timed out.\n\n"
238
  yield f"Model might be loading (cold start). Try again in 30-60 seconds.\n\n"
239
  yield f"πŸ’‘ Or use GPT-4o/Claude-3.5-Sonnet for instant results!"
 
 
 
 
240
  except requests.exceptions.RequestException as req_err:
241
  yield f"🌐 Network error: {str(req_err)}\n\n"
 
242
  yield f"πŸ’‘ Please use GPT-4o or Claude-3.5-Sonnet instead."
243
  except Exception as e:
244
  yield f"❌ Unexpected error: {str(e)}\n\n"
 
 
245
  yield f"πŸ’‘ Tip: Use GPT-4o or Claude-3.5-Sonnet for reliable results!"
246
 
247
  def optimize(python, model):
 
150
  # Get HF token (optional - works without it but with rate limits)
151
  hf_token = os.environ.get("HF_TOKEN", "")
152
 
153
+ # Debug info
154
+ if hf_token:
155
+ yield f"πŸ”‘ Using HF token (first 10 chars): {hf_token[:10]}...\n\n"
156
+ else:
157
+ yield f"⚠️ No HF_TOKEN found - using public API (limited)\n\n"
158
+
159
  # Get the model ID
160
  model_id = HF_MODELS.get(model_name)
161
  if not model_id:
162
  yield f"❌ Unknown model: {model_name}"
163
  return
164
 
165
+ yield f"πŸ“‘ Calling model: {model_id}\n\n"
166
+
167
  headers = {"Authorization": f"Bearer {hf_token}"} if hf_token else {}
168
 
169
  # Prepare the prompt
 
241
  yield f"❌ Error from Hugging Face (HTTP {response.status_code}):\n{error_msg}\n\n"
242
  yield f"πŸ’‘ Tip: Use GPT-4o or Claude-3.5-Sonnet for now (they're working!)"
243
 
244
+ except requests.exceptions.Timeout as timeout_err:
245
+ yield f"⏱️ Request timed out: {str(timeout_err)}\n\n"
246
  yield f"Model might be loading (cold start). Try again in 30-60 seconds.\n\n"
247
  yield f"πŸ’‘ Or use GPT-4o/Claude-3.5-Sonnet for instant results!"
248
+ except requests.exceptions.ConnectionError as conn_err:
249
+ yield f"🌐 Connection error: {str(conn_err)}\n\n"
250
+ yield f"Cannot reach Hugging Face API. Check your internet connection.\n\n"
251
+ yield f"πŸ’‘ Please use GPT-4o or Claude-3.5-Sonnet instead."
252
  except requests.exceptions.RequestException as req_err:
253
  yield f"🌐 Network error: {str(req_err)}\n\n"
254
+ yield f"Type: {type(req_err).__name__}\n\n"
255
  yield f"πŸ’‘ Please use GPT-4o or Claude-3.5-Sonnet instead."
256
  except Exception as e:
257
  yield f"❌ Unexpected error: {str(e)}\n\n"
258
+ yield f"Error type: {type(e).__name__}\n"
259
+ yield f"Full details: {repr(e)}\n\n"
260
  yield f"πŸ’‘ Tip: Use GPT-4o or Claude-3.5-Sonnet for reliable results!"
261
 
262
  def optimize(python, model):