Update README.md
Browse files
README.md
CHANGED
|
@@ -16,8 +16,10 @@ pipeline_tag: text2text-generation
|
|
| 16 |
import torch
|
| 17 |
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
|
| 18 |
|
|
|
|
|
|
|
| 19 |
tokenizer = AutoTokenizer.from_pretrained('TeraSpace/dialofred')
|
| 20 |
-model = AutoModelForSeq2SeqLM.from_pretrained('TeraSpace/dialofred', device_map=
|
| 21 |
while True:
|
| 22 |
text_inp = input("=>")
|
| 23 |
lm_text=f'<SC1>- {text_inp}\n- <extra_id_0>'
|
|
|
|
| 16 |
import torch
|
| 17 |
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
|
| 18 |
|
| 19 |
+device = "cuda"
|
| 20 |
+
|
| 21 |
tokenizer = AutoTokenizer.from_pretrained('TeraSpace/dialofred')
|
| 22 |
+model = AutoModelForSeq2SeqLM.from_pretrained('TeraSpace/dialofred', device_map=device).to(device)# Add torch_dtype=torch.bfloat16 to use less memory
|
| 23 |
while True:
|
| 24 |
text_inp = input("=>")
|
| 25 |
lm_text=f'<SC1>- {text_inp}\n- <extra_id_0>'
|