Spaces: Running on Zero
Update inference.py
Browse files — inference.py (+2 −1)
inference.py
CHANGED
|
@@ -10,6 +10,7 @@ import os
|
|
| 10 |
import requests
|
| 11 |
import time
|
| 12 |
from pathlib import Path
|
|
|
|
| 13 |
|
| 14 |
# Check CUDA availability
|
| 15 |
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
|
|
@@ -165,7 +166,7 @@ class GenerativeInferenceModel:
|
|
| 165 |
# Store the model for future use
|
| 166 |
self.models[model_type] = model
|
| 167 |
return model
|
| 168 |
-
|
| 169 |
def inference(self, image, model_type, config):
|
| 170 |
# Load model if not already loaded
|
| 171 |
model = self.load_model(model_type)
|
|
|
|
| 10 |
import requests
|
| 11 |
import time
|
| 12 |
from pathlib import Path
|
| 13 |
+
from spaces import GPU
|
| 14 |
|
| 15 |
# Check CUDA availability
|
| 16 |
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
|
|
|
|
| 166 |
# Store the model for future use
|
| 167 |
self.models[model_type] = model
|
| 168 |
return model
|
| 169 |
+
@GPU
|
| 170 |
def inference(self, image, model_type, config):
|
| 171 |
# Load model if not already loaded
|
| 172 |
model = self.load_model(model_type)
|