Spaces:
Sleeping
Sleeping
changed the max tokens
Browse files
max_new_tokens=192 (was 384)
app.py
CHANGED
|
@@ -31,7 +31,7 @@ def generate(question: str, level: str, step_by_step: bool) -> str:
|
|
| 31 |
with torch.no_grad():
|
| 32 |
out = model.generate(
|
| 33 |
**inputs,
|
| 34 |
-
max_new_tokens=384,
|
| 35 |
do_sample=True,
|
| 36 |
temperature=0.7,
|
| 37 |
top_p=0.95,
|
|
|
|
| 31 |
with torch.no_grad():
|
| 32 |
out = model.generate(
|
| 33 |
**inputs,
|
| 34 |
+
max_new_tokens=192,
|
| 35 |
do_sample=True,
|
| 36 |
temperature=0.7,
|
| 37 |
top_p=0.95,
|