Spaces:
Paused
Paused
Tonic committed on
space gpu cuda fix
Browse files
app.py
CHANGED
|
@@ -17,7 +17,7 @@ import gc
|
|
| 17 |
from contextlib import contextmanager
|
| 18 |
import os
|
| 19 |
from loadimg import load_img
|
| 20 |
-
|
| 21 |
|
| 22 |
title = "# **WIP / DEMO** 🙋🏻♂️Welcome to Tonic's Pixtral Model Demo"
|
| 23 |
description = """
|
|
@@ -206,8 +206,31 @@ def gpu_memory_manager():
|
|
| 206 |
torch.cuda.empty_cache()
|
| 207 |
gc.collect()
|
| 208 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 209 |
|
| 210 |
@spaces.GPU(duration=120)
|
|
|
|
| 211 |
def generate_text(image, prompt, max_tokens):
|
| 212 |
try:
|
| 213 |
with gpu_memory_manager():
|
|
@@ -243,9 +266,12 @@ def generate_text(image, prompt, max_tokens):
|
|
| 243 |
|
| 244 |
return generated_text, len(generated_ids[0]), 1
|
| 245 |
except Exception as e:
|
|
|
|
|
|
|
| 246 |
return f"Error: {str(e)}", 0, 0
|
| 247 |
|
| 248 |
@spaces.GPU(duration=60)
|
|
|
|
| 249 |
def calculate_similarity(image1, image2):
|
| 250 |
try:
|
| 251 |
with gpu_memory_manager():
|
|
@@ -269,6 +295,8 @@ def calculate_similarity(image1, image2):
|
|
| 269 |
|
| 270 |
return similarity
|
| 271 |
except Exception as e:
|
|
|
|
|
|
|
| 272 |
return f"Error: {str(e)}"
|
| 273 |
|
| 274 |
with gr.Blocks() as demo:
|
|
@@ -324,4 +352,8 @@ with gr.Blocks() as demo:
|
|
| 324 |
)
|
| 325 |
|
| 326 |
if __name__ == "__main__":
|
| 327 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 17 |
from contextlib import contextmanager
|
| 18 |
import os
|
| 19 |
from loadimg import load_img
|
| 20 |
+
import traceback
|
| 21 |
|
| 22 |
title = "# **WIP / DEMO** 🙋🏻♂️Welcome to Tonic's Pixtral Model Demo"
|
| 23 |
description = """
|
|
|
|
| 206 |
torch.cuda.empty_cache()
|
| 207 |
gc.collect()
|
| 208 |
|
| 209 |
+
def cuda_error_handler(func):
    """Decorator that recovers once from CUDA runtime errors.

    If the wrapped call raises a ``RuntimeError`` whose message mentions
    CUDA, the cached GPU allocator blocks are freed, garbage collection
    runs, and the call is retried a single time.  Non-CUDA
    ``RuntimeError``s are re-raised unchanged; any other exception is
    logged with a traceback and converted into an
    ``(error_message, 0, 0)`` tuple so the Gradio callback still
    receives a value.

    NOTE(review): the 3-tuple error return matches ``generate_text``'s
    ``(text, token_count, status)`` shape, but ``calculate_similarity``
    returns a single value — confirm downstream handling of a tuple on
    its failure path.
    """
    import functools  # local import keeps this block self-contained

    # wraps() preserves __name__/__doc__ so outer decorators such as
    # @spaces.GPU see the real function, not "wrapper".
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except RuntimeError as e:
            # Only attempt recovery for CUDA-related runtime errors.
            if "CUDA" not in str(e):
                raise
            print(f"CUDA error occurred: {str(e)}")
            print("Attempting to recover...")
            torch.cuda.empty_cache()  # release cached allocator memory
            gc.collect()
            try:
                # Single retry after cleanup.
                return func(*args, **kwargs)
            except Exception as e2:
                print(f"Recovery failed. Error: {str(e2)}")
                return f"An error occurred: {str(e2)}", 0, 0
        except Exception as e:
            print(f"An unexpected error occurred: {str(e)}")
            traceback.print_exc()
            return f"An unexpected error occurred: {str(e)}", 0, 0

    return wrapper
|
| 231 |
|
| 232 |
@spaces.GPU(duration=120)
|
| 233 |
+
@cuda_error_handler
|
| 234 |
def generate_text(image, prompt, max_tokens):
|
| 235 |
try:
|
| 236 |
with gpu_memory_manager():
|
|
|
|
| 266 |
|
| 267 |
return generated_text, len(generated_ids[0]), 1
|
| 268 |
except Exception as e:
|
| 269 |
+
print(f"Error in generate_text: {str(e)}")
|
| 270 |
+
traceback.print_exc()
|
| 271 |
return f"Error: {str(e)}", 0, 0
|
| 272 |
|
| 273 |
@spaces.GPU(duration=60)
|
| 274 |
+
@cuda_error_handler
|
| 275 |
def calculate_similarity(image1, image2):
|
| 276 |
try:
|
| 277 |
with gpu_memory_manager():
|
|
|
|
| 295 |
|
| 296 |
return similarity
|
| 297 |
except Exception as e:
|
| 298 |
+
print(f"Error in calculate_similarity: {str(e)}")
|
| 299 |
+
traceback.print_exc()
|
| 300 |
return f"Error: {str(e)}"
|
| 301 |
|
| 302 |
with gr.Blocks() as demo:
|
|
|
|
| 352 |
)
|
| 353 |
|
| 354 |
# Script entry point: launch the Gradio demo.  Any startup failure
# (e.g. port already bound, missing runtime dependency) is printed with
# a full traceback instead of failing silently.
if __name__ == "__main__":
    try:
        demo.launch()
    except Exception as e:
        print(f"An error occurred while launching the demo: {str(e)}")
        traceback.print_exc()