khirodsahoo93 commited on
Commit
769b2c0
·
verified ·
1 Parent(s): b731b47

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +59 -13
app.py CHANGED
@@ -178,25 +178,71 @@ def stream_huggingface(python, model_name):
178
  timeout=60
179
  )
180
 
 
 
 
 
 
 
 
181
  if response.status_code == 200:
182
- result = response.json()
183
- if isinstance(result, list) and len(result) > 0:
184
- generated_text = result[0].get("generated_text", "")
185
- else:
186
- generated_text = result.get("generated_text", "")
187
-
188
- # Clean up the response
189
- reply = generated_text.replace('```cpp\n','').replace('```','')
190
- yield reply
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
191
  else:
192
- error_msg = response.json().get("error", "Unknown error")
193
- if "loading" in error_msg.lower():
 
 
 
 
194
  yield f"⏳ Model is loading... This may take 20-30 seconds. Please try again."
195
  else:
196
- yield f"❌ Error from Hugging Face: {error_msg}"
 
197
 
 
 
 
 
 
 
 
198
  except Exception as e:
199
- yield f"❌ Error calling Hugging Face: {str(e)}"
 
200
 
201
  def optimize(python, model):
202
  """Convert Python to C++ using selected AI model"""
 
178
  timeout=60
179
  )
180
 
181
+ # Check if response body is empty
182
+ if not response.text or len(response.text.strip()) == 0:
183
+ yield f"⏳ Model is loading or initializing...\n\n"
184
+ yield f"This happens on first use. Please try again in 30-60 seconds.\n\n"
185
+ yield f"πŸ’‘ Quick alternative: Use GPT-4o or Claude-3.5-Sonnet (instant results!)"
186
+ return
187
+
188
  if response.status_code == 200:
189
+ try:
190
+ result = response.json()
191
+ if isinstance(result, list) and len(result) > 0:
192
+ generated_text = result[0].get("generated_text", "")
193
+ else:
194
+ generated_text = result.get("generated_text", "")
195
+
196
+ if not generated_text or len(generated_text.strip()) == 0:
197
+ yield f"⚠️ Model returned empty response.\n\n"
198
+ yield f"Try again or use GPT-4o/Claude-3.5-Sonnet instead."
199
+ return
200
+
201
+ # Clean up the response
202
+ reply = generated_text.replace('```cpp\n','').replace('```','')
203
+ yield reply
204
+ except ValueError as json_err:
205
+ # JSON parsing failed
206
+ yield f"⚠️ Model response format error.\n\n"
207
+ yield f"The model might still be warming up. Try again in 30 seconds.\n\n"
208
+ yield f"πŸ’‘ Or use GPT-4o/Claude-3.5-Sonnet for instant results!"
209
+
210
+ elif response.status_code == 401 or response.status_code == 403:
211
+ # Authentication error - need HF token
212
+ yield f"πŸ”‘ Authentication Required!\n\n"
213
+ yield f"To use open-source models, you need a FREE Hugging Face token:\n\n"
214
+ yield f"1. Get token: https://huggingface.co/settings/tokens\n"
215
+ yield f"2. Add HF_TOKEN secret in Space Settings\n"
216
+ yield f"3. Factory reboot\n\n"
217
+ yield f"OR use GPT-4o/Claude-3.5-Sonnet instead (they work now!)"
218
+ elif response.status_code == 503:
219
+ # Service unavailable - model loading
220
+ yield f"⏳ Model is currently loading (cold start)...\n\n"
221
+ yield f"This can take 30-60 seconds on first use.\n"
222
+ yield f"Please wait a minute and try again.\n\n"
223
+ yield f"πŸ’‘ Quick solution: Use GPT-4o or Claude-3.5-Sonnet (no waiting!)"
224
  else:
225
+ try:
226
+ error_msg = response.json().get("error", "Unknown error")
227
+ except:
228
+ error_msg = response.text[:200] if response.text else "Empty response"
229
+
230
+ if "loading" in str(error_msg).lower():
231
  yield f"⏳ Model is loading... This may take 20-30 seconds. Please try again."
232
  else:
233
+ yield f"❌ Error from Hugging Face (HTTP {response.status_code}):\n{error_msg}\n\n"
234
+ yield f"πŸ’‘ Tip: Use GPT-4o or Claude-3.5-Sonnet for now (they're working!)"
235
 
236
+ except requests.exceptions.Timeout:
237
+ yield f"⏱️ Request timed out.\n\n"
238
+ yield f"Model might be loading (cold start). Try again in 30-60 seconds.\n\n"
239
+ yield f"πŸ’‘ Or use GPT-4o/Claude-3.5-Sonnet for instant results!"
240
+ except requests.exceptions.RequestException as req_err:
241
+ yield f"🌐 Network error: {str(req_err)}\n\n"
242
+ yield f"πŸ’‘ Please use GPT-4o or Claude-3.5-Sonnet instead."
243
  except Exception as e:
244
+ yield f"❌ Unexpected error: {str(e)}\n\n"
245
+ yield f"πŸ’‘ Tip: Use GPT-4o or Claude-3.5-Sonnet for reliable results!"
246
 
247
  def optimize(python, model):
248
  """Convert Python to C++ using selected AI model"""