Commit ·
eb993f9
1
Parent(s): 9965b9d
log time for inference and GPU used
Browse files
app.py
CHANGED
|
@@ -1,4 +1,4 @@
|
|
| 1 |
-
import
|
| 2 |
from PIL import Image
|
| 3 |
import gradio as gr
|
| 4 |
from glob import glob
|
|
@@ -56,8 +56,11 @@ def calc_probs(prompt, images):
|
|
| 56 |
|
| 57 |
def predict(prompt, image_1, image_2):
    """Score the two candidate images against *prompt* via calc_probs.

    Returns the two probabilities rounded to 3 decimal places, as strings
    (Gradio text outputs).
    """
    print(f"Starting prediction for prompt: {prompt}")
    probs = calc_probs(prompt, [image_1, image_2])
    print(f"Prediction: {probs}")
    # Round and stringify each of the two scores for the UI.
    rounded = [str(round(p, 3)) for p in (probs[0], probs[1])]
    return rounded[0], rounded[1]
|
| 62 |
|
| 63 |
|
|
|
|
| 1 |
+
import time
|
| 2 |
from PIL import Image
|
| 3 |
import gradio as gr
|
| 4 |
from glob import glob
|
|
|
|
| 56 |
|
| 57 |
def predict(prompt, image_1, image_2):
    """Score the two candidate images against *prompt* via calc_probs.

    Logs inference wall-clock time and, when running on CUDA, the peak GPU
    memory allocated. Returns the two probabilities rounded to 3 decimal
    places, as strings (Gradio text outputs).
    """
    print(f"Starting prediction for prompt: {prompt}")
    start_time = time.time()
    probs = calc_probs(prompt, [image_1, image_2])
    # BUG FIX: probs is a sequence (indexed as probs[0]/probs[1] below), so
    # f"{probs:.3f}" raised TypeError (format spec on a non-scalar). Format
    # each element instead; also drop the stray ", )" from the message.
    formatted = ", ".join(f"{p:.3f}" for p in probs)
    print(f"Prediction: [{formatted}] ({time.time() - start_time:.2f} seconds)")
    if device == "cuda":
        # Peak bytes allocated on the device since program start, reported in GB.
        print(f"GPU mem used: {round(torch.cuda.max_memory_allocated(device) / 1024 / 1024 / 1024, 2)} GB")
    return str(round(probs[0], 3)), str(round(probs[1], 3))
|
| 65 |
|
| 66 |
|