{
"model_type": "transformer_lite",
"architectures": [
"TransformerLite"
],
"vocab_size": 12288,
"hidden_size": 128,
"num_hidden_layers": 2,
"num_attention_heads": 4,
"intermediate_size": 512,
"max_position_embeddings": 64,
"quantization_bits": 8,
"model_format": "quantized",
"framework": "pytorch",
"device": "cpu"
}