Update README.md
Browse files
README.md
CHANGED
```diff
@@ -134,6 +134,7 @@ pip install transformers torch
 
 ## 🧪 Esempio di utilizzo in Python
 
+```python
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
 
@@ -145,7 +146,7 @@ inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
 
 outputs = model.generate(**inputs, max_new_tokens=100, temperature=0.7)
 print(tokenizer.decode(outputs[0], skip_special_tokens=True))
-
+```
 
 ---
 
```