File size: 406 Bytes
{
"architectures": [
"CollinsModel"
],
"attention_probs_dropout_prob": 0.1,
"dtype": "float32",
"hash_seed": 42,
"hidden_dropout_prob": 0.1,
"hidden_size": 256,
"intermediate_size": 1024,
"max_position_embeddings": 512,
"model_type": "collins",
"num_attention_heads": 8,
"num_buckets": 2048,
"num_hidden_layers": 3,
"transformers_version": "4.57.1",
"vocab_size": 30522
}
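
For reference, a minimal sketch of how a config like this might be loaded with transformers. Since "collins" is a custom model_type that is not built into the library, this assumes the repository ships its own CollinsConfig/CollinsModel code, so trust_remote_code=True is required; the repo id "user/collins-model" below is a placeholder, not the actual repository name.

from transformers import AutoConfig, AutoModel

# Hypothetical repo id; trust_remote_code is assumed because "collins"
# is not a model_type registered in the transformers library itself.
config = AutoConfig.from_pretrained("user/collins-model", trust_remote_code=True)

# Read back a few of the fields from the config shown above.
print(config.hidden_size)        # 256
print(config.num_hidden_layers)  # 3
print(config.num_buckets)        # 2048

# Build a randomly initialized model from the config (no weights loaded).
model = AutoModel.from_config(config, trust_remote_code=True)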