{
"_from_model_config": true,
"do_sample": true,
"eos_token_id": [
100265,
100257
],
"max_new_tokens": 32768,
"temperature": 0.6,
"top_p": 0.95,
"transformers_version": "4.57.1"
}