joonsak commited on
Commit
b7c3be4
·
verified ·
1 Parent(s): 4bf76eb

Reduced max_new_tokens from 384 to 192

Browse files

max_new_tokens=192 (was 384)

Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -31,7 +31,7 @@ def generate(question: str, level: str, step_by_step: bool) -> str:
31
  with torch.no_grad():
32
  out = model.generate(
33
  **inputs,
34
- max_new_tokens=384,
35
  do_sample=True,
36
  temperature=0.7,
37
  top_p=0.95,
 
31
  with torch.no_grad():
32
  out = model.generate(
33
  **inputs,
34
+ max_new_tokens=192,
35
  do_sample=True,
36
  temperature=0.7,
37
  top_p=0.95,