Spaces:
Sleeping
Sleeping
Add prints to the app to check whether CUDA is available
Browse files
app.py
CHANGED
|
@@ -3,6 +3,12 @@ import time
|
|
| 3 |
import streamlit as st
|
| 4 |
from transformers import pipeline, Conversation, AutoTokenizer
|
| 5 |
from langdetect import detect
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 6 |
|
| 7 |
# choose your model here by setting model_chosen_id equal to 1 or 2
|
| 8 |
model_chosen_id = 2
|
|
|
|
# app.py — model selection for the Streamlit chat app, with CUDA diagnostics.
import streamlit as st
from transformers import pipeline, Conversation, AutoTokenizer
from langdetect import detect
import torch

# Diagnostic prints: confirm whether this Space actually got a GPU.
print(f"Is CUDA available: {torch.cuda.is_available()}")
if torch.cuda.is_available():
    # get_device_name / current_device raise RuntimeError when no CUDA
    # device exists, so only query them after confirming availability —
    # otherwise the very check meant to detect a missing GPU crashes the app.
    print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")

# choose your model here by setting model_chosen_id equal to 1 or 2
model_chosen_id = 2