{
"model_type": "gptj",
"vocab_size": 50257,
"n_positions": 2048,
"n_ctx": 2048,
"n_embd": 4096,
"n_layer": 28,
"n_head": 16,
"rotary_dim": 64,
"initializer_range": 0.02,
"layer_norm_epsilon": 1e-5,
"bos_token_id": 50256,
"eos_token_id": 50256,
"pad_token_id": 50256,
"task_specific_params": {
"text-generation": {
"do_sample": true,
"max_length": 50
}
},
"max_length": 20,
"min_length": 0,
"do_sample": false,
"early_stopping": false,
"num_beams": 1,
"temperature": 1.0,
"top_k": 50,
"top_p": 1.0,
"repetition_penalty": 1.0,
"length_penalty": 1.0,
"no_repeat_ngram_size": 0,
"encoder_no_repeat_ngram_size": 0,
"num_return_sequences": 1,
"chunk_size_feed_forward": 0,
"output_scores": false,
"return_dict_in_generate": false,
"forced_bos_token_id": null,
"forced_eos_token_id": null,
"remove_invalid_values": false
}