{
"model_config": {
"n_layer": 12,
"n_head": 12,
"n_embd": 768,
"block_size": 1024,
"bias": false,
"dropout": 0.0,
"n_codes_total": 8,
"n_codes_given": 1,
"input_vocab_size": 1056,
"output_vocab_size": 1056
},
"model_type": "fine",
"parameter_count": 97924608,
"needs_tokenizer": true,
"best_val_loss": 2.5150986952781675,
"torch_dtype": "bfloat16"
}