{
"architectures": [
"CustomStudentModel"
],
"dropout": 0.1,
"hidden_size": 512,
"intermediate_size": 2048,
"num_attention_heads": 8,
"num_decoder_layers": 6,
"num_encoder_layers": 6,
"torch_dtype": "float32",
"transformers_version": "4.47.0",
"vocab_size": 256204
}