{
"architectures": [
"TinyTransformerModel"
],
"dropout": 0.1,
"dtype": "float32",
"hidden_size": 128,
"intermediate_size": 512,
"max_position_embeddings": 512,
"model_type": "tinytransformer",
"num_attention_heads": 4,
"num_hidden_layers": 2,
"transformers_version": "5.0.0",
"use_cache": false,
"vocab_size": 21091
}