{
"_from_model_config": false,
"assistant_confidence_threshold": 0.4,
"assistant_lookbehind": 10,
"bos_token_id": 643,
"decoder_start_token_id": 644,
"diversity_penalty": 0.0,
"do_sample": false,
"early_stopping": false,
"encoder_no_repeat_ngram_size": 0,
"encoder_repetition_penalty": 1.0,
"eos_token_id": [
643
],
"epsilon_cutoff": 0.0,
"eta_cutoff": 0.0,
"forced_decoder_ids": null,
"language": null,
"length_penalty": 1.0,
"max_length": 444,
"min_length": 0,
"no_repeat_ngram_size": 4,
"num_assistant_tokens": 20,
"num_assistant_tokens_schedule": "constant",
"num_beam_groups": 1,
"num_beams": 1,
"num_return_sequences": 1,
"output_attentions": false,
"output_hidden_states": false,
"output_scores": false,
"pad_token_id": 643,
"remove_invalid_values": false,
"repetition_penalty": 1.5,
"return_dict_in_generate": false,
"target_lookbehind": 10,
"task": null,
"temperature": 1.0,
"top_k": 50,
"top_p": 1.0,
"transformers_version": "5.2.0",
"typical_p": 1.0,
"use_cache": true
}