Update README.md
Browse files
README.md
CHANGED
```diff
@@ -57,7 +57,7 @@ inputs = tokenizer(

 # Generate the sequences
 with torch.inference_mode():
-    outputs = model.generate(**inputs, max_new_tokens=32)
+    outputs = model.generate(**inputs, max_new_tokens=32, temperature=0.00001, top_k=1)

 # Decode the generated sequences
 decoded_sequences = tokenizer.batch_decode(outputs, skip_special_tokens=True)
```