Update app.py
Browse files
app.py
CHANGED
|
@@ -70,6 +70,8 @@ from transformers import T5Tokenizer, T5ForConditionalGeneration
|
|
| 70 |
|
| 71 |
tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-xl")
|
| 72 |
model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-xl", device_map = "auto")
|
|
|
|
|
|
|
| 73 |
|
| 74 |
# We are running FP32!
|
| 75 |
|
|
@@ -91,8 +93,6 @@ def process():
|
|
| 91 |
##input_ids = tokenizer(my_text, return_tensors = "pt").input_ids.to("cuda")
|
| 92 |
|
| 93 |
#From Here
|
| 94 |
-
# Move the model to the GPU
|
| 95 |
-
model = model.to('cuda')
|
| 96 |
|
| 97 |
# User input
|
| 98 |
user_input = st.text_input("Գրեք ձեր հարցը...", "")
|
|
|
|
| 70 |
|
| 71 |
tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-xl")
|
| 72 |
model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-xl", device_map = "auto")
|
| 73 |
+
# Move the model to the GPU — NOTE(review): the model was loaded with device_map="auto",
# which means accelerate has already dispatched it to available devices; calling
# model.to('cuda') on such a model is redundant and recent transformers versions raise
# an error for it. Consider dropping this line (or the device_map argument) — verify.
|
| 74 |
+
model = model.to('cuda')
|
| 75 |
|
| 76 |
# We are running FP32!
|
| 77 |
|
|
|
|
| 93 |
##input_ids = tokenizer(my_text, return_tensors = "pt").input_ids.to("cuda")
|
| 94 |
|
| 95 |
#From Here
|
|
|
|
|
|
|
| 96 |
|
| 97 |
# User input
|
| 98 |
user_input = st.text_input("Գրեք ձեր հարցը...", "")
|