{
"model_type": "transformer_lite",
"architectures": [
"TransformerLite"
],
"vocab_size": 12288,
"hidden_size": 128,
"num_hidden_layers": 2,
"num_attention_heads": 4,
"intermediate_size": 512,
"max_position_embeddings": 64,
"model_format": "base",
"framework": "pytorch",
"device": "cpu"
}