{
"architectures": [
"GPTminiHF"
],
"context_length": 128,
"drop_rate": 0.1,
"dtype": "float32",
"emb_dim": 256,
"model_type": "gptmini",
"n_heads": 4,
"n_layers": 4,
"qkv_bias": false,
"transformers_version": "4.57.1",
"vocab_size": 50257
}