BubbleQ committed on
Commit
6347b48
·
verified ·
1 Parent(s): f350ad4

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +1 -1
README.md CHANGED
@@ -216,7 +216,7 @@ prompt = tokenizer.apply_chat_template(
216
  )
217
 
218
  sampling_params = SamplingParams(
219
- temperature=0.6, top_p=0.95, top_k=40
220
  )
221
 
222
  outputs = llm.generate([prompt], sampling_params)
 
216
  )
217
 
218
  sampling_params = SamplingParams(
219
+ temperature=0.6, top_p=0.95, top_k=40, max_new_tokens=1024
220
  )
221
 
222
  outputs = llm.generate([prompt], sampling_params)