Spaces:
Runtime error
Runtime error
Upload app.py
Browse files
app.py
CHANGED
|
@@ -150,12 +150,20 @@ def stream_huggingface(python, model_name):
|
|
| 150 |
# Get HF token (optional - works without it but with rate limits)
|
| 151 |
hf_token = os.environ.get("HF_TOKEN", "")
|
| 152 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 153 |
# Get the model ID
|
| 154 |
model_id = HF_MODELS.get(model_name)
|
| 155 |
if not model_id:
|
| 156 |
yield f"❌ Unknown model: {model_name}"
|
| 157 |
return
|
| 158 |
|
|
|
|
|
|
|
| 159 |
headers = {"Authorization": f"Bearer {hf_token}"} if hf_token else {}
|
| 160 |
|
| 161 |
# Prepare the prompt
|
|
@@ -233,15 +241,22 @@ def stream_huggingface(python, model_name):
|
|
| 233 |
yield f"❌ Error from Hugging Face (HTTP {response.status_code}):\n{error_msg}\n\n"
|
| 234 |
yield f"💡 Tip: Use GPT-4o or Claude-3.5-Sonnet for now (they're working!)"
|
| 235 |
|
| 236 |
-
except requests.exceptions.Timeout:
|
| 237 |
-
yield f"⏱️ Request timed out
|
| 238 |
yield f"Model might be loading (cold start). Try again in 30-60 seconds.\n\n"
|
| 239 |
yield f"💡 Or use GPT-4o/Claude-3.5-Sonnet for instant results!"
|
|
|
|
|
|
|
|
|
|
|
|
|
| 240 |
except requests.exceptions.RequestException as req_err:
|
| 241 |
yield f"🌐 Network error: {str(req_err)}\n\n"
|
|
|
|
| 242 |
yield f"💡 Please use GPT-4o or Claude-3.5-Sonnet instead."
|
| 243 |
except Exception as e:
|
| 244 |
yield f"❌ Unexpected error: {str(e)}\n\n"
|
|
|
|
|
|
|
| 245 |
yield f"💡 Tip: Use GPT-4o or Claude-3.5-Sonnet for reliable results!"
|
| 246 |
|
| 247 |
def optimize(python, model):
|
|
|
|
| 150 |
# Get HF token (optional - works without it but with rate limits)
|
| 151 |
hf_token = os.environ.get("HF_TOKEN", "")
|
| 152 |
|
| 153 |
+
# Debug info
|
| 154 |
+
if hf_token:
|
| 155 |
+
yield f"🔑 Using HF token (first 10 chars): {hf_token[:10]}...\n\n"
|
| 156 |
+
else:
|
| 157 |
+
yield f"⚠️ No HF_TOKEN found - using public API (limited)\n\n"
|
| 158 |
+
|
| 159 |
# Get the model ID
|
| 160 |
model_id = HF_MODELS.get(model_name)
|
| 161 |
if not model_id:
|
| 162 |
yield f"❌ Unknown model: {model_name}"
|
| 163 |
return
|
| 164 |
|
| 165 |
+
yield f"📡 Calling model: {model_id}\n\n"
|
| 166 |
+
|
| 167 |
headers = {"Authorization": f"Bearer {hf_token}"} if hf_token else {}
|
| 168 |
|
| 169 |
# Prepare the prompt
|
|
|
|
| 241 |
yield f"❌ Error from Hugging Face (HTTP {response.status_code}):\n{error_msg}\n\n"
|
| 242 |
yield f"💡 Tip: Use GPT-4o or Claude-3.5-Sonnet for now (they're working!)"
|
| 243 |
|
| 244 |
+
except requests.exceptions.Timeout as timeout_err:
|
| 245 |
+
yield f"⏱️ Request timed out: {str(timeout_err)}\n\n"
|
| 246 |
yield f"Model might be loading (cold start). Try again in 30-60 seconds.\n\n"
|
| 247 |
yield f"💡 Or use GPT-4o/Claude-3.5-Sonnet for instant results!"
|
| 248 |
+
except requests.exceptions.ConnectionError as conn_err:
|
| 249 |
+
yield f"🔌 Connection error: {str(conn_err)}\n\n"
|
| 250 |
+
yield f"Cannot reach Hugging Face API. Check your internet connection.\n\n"
|
| 251 |
+
yield f"💡 Please use GPT-4o or Claude-3.5-Sonnet instead."
|
| 252 |
except requests.exceptions.RequestException as req_err:
|
| 253 |
yield f"🌐 Network error: {str(req_err)}\n\n"
|
| 254 |
+
yield f"Type: {type(req_err).__name__}\n\n"
|
| 255 |
yield f"💡 Please use GPT-4o or Claude-3.5-Sonnet instead."
|
| 256 |
except Exception as e:
|
| 257 |
yield f"❌ Unexpected error: {str(e)}\n\n"
|
| 258 |
+
yield f"Error type: {type(e).__name__}\n"
|
| 259 |
+
yield f"Full details: {repr(e)}\n\n"
|
| 260 |
yield f"💡 Tip: Use GPT-4o or Claude-3.5-Sonnet for reliable results!"
|
| 261 |
|
| 262 |
def optimize(python, model):
|