Spaces:
Sleeping
Sleeping
Commit ·
4a05067
1
Parent(s): 4726955
Fix safetensors size validation bug
Browse files — quantizer.py (+14, −9)
quantizer.py
CHANGED
|
@@ -51,15 +51,20 @@ async def quantize_model(job: Dict) -> Dict:
|
|
| 51 |
|
| 52 |
# Check size
|
| 53 |
if hasattr(model_info, 'safetensors') and model_info.safetensors:
|
| 54 |
-
total_size =
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
if
|
| 62 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 63 |
|
| 64 |
except Exception as e:
|
| 65 |
raise Exception(f"Model validation failed: {str(e)}")
|
|
|
|
| 51 |
|
| 52 |
# Check size
|
| 53 |
if hasattr(model_info, 'safetensors') and model_info.safetensors:
|
| 54 |
+
total_size = 0
|
| 55 |
+
for file_info in model_info.safetensors.values():
|
| 56 |
+
if isinstance(file_info, dict) and 'size' in file_info:
|
| 57 |
+
total_size += file_info['size']
|
| 58 |
+
elif hasattr(file_info, 'size'):
|
| 59 |
+
total_size += file_info.size
|
| 60 |
+
|
| 61 |
+
if total_size > 0:
|
| 62 |
+
size_gb = total_size / (1024**3)
|
| 63 |
+
print(f" Model size: {size_gb:.2f} GB")
|
| 64 |
+
|
| 65 |
+
# Skip if too large (>10GB on free tier)
|
| 66 |
+
if size_gb > 10:
|
| 67 |
+
raise Exception(f"Model too large for free tier: {size_gb:.2f} GB (max 10GB)")
|
| 68 |
|
| 69 |
except Exception as e:
|
| 70 |
raise Exception(f"Model validation failed: {str(e)}")
|