Instructions to use logicreasoning/LogiT5 with libraries, inference providers, notebooks, and local apps. Follow these links to get started.
- Libraries
- Transformers
How to use logicreasoning/LogiT5 with Transformers:
# Load model directly
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

tokenizer = AutoTokenizer.from_pretrained("logicreasoning/LogiT5")
model = AutoModelForSeq2SeqLM.from_pretrained("logicreasoning/LogiT5")
- Notebooks
- Google Colab
- Kaggle
Update README.md
Browse files
README.md
CHANGED
|
@@ -1,4 +1,3 @@
|
|
| 1 |
-
```
|
| 2 |
import transformers
|
| 3 |
import datasets
|
| 4 |
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
|
|
@@ -13,4 +12,4 @@ input_text = '' #your input text here must be a string
|
|
| 13 |
input = tokenizer(input_text, return_tensors='pt', padding=True).to(device)
|
| 14 |
model = model.to(device)
|
| 15 |
output = model.generate(**input, max_length=1024)
|
| 16 |
-
prediction = tokenizer.decode(output[0], skip_special_tokens=True)
|
|
|
|
|
|
|
| 1 |
import transformers
|
| 2 |
import datasets
|
| 3 |
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
|
|
|
|
| 12 |
input = tokenizer(input_text, return_tensors='pt', padding=True).to(device)
|
| 13 |
model = model.to(device)
|
| 14 |
output = model.generate(**input, max_length=1024)
|
| 15 |
+
prediction = tokenizer.decode(output[0], skip_special_tokens=True)
|