BubbleQ committed on
Commit
3e4e7aa
·
verified ·
1 Parent(s): 820462e

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +1 -1
README.md CHANGED
@@ -214,7 +214,7 @@ prompt = tokenizer.apply_chat_template(
214
  )
215
 
216
  sampling_params = SamplingParams(
217
- temperature=0.6, top_p=0.95, top_k=40, max_tokens=512
218
  )
219
 
220
  outputs = llm.generate([prompt], sampling_params)
 
214
  )
215
 
216
  sampling_params = SamplingParams(
217
+ temperature=0.6, top_p=0.95, top_k=40, max_tokens=1024
218
  )
219
 
220
  outputs = llm.generate([prompt], sampling_params)