{
"n_layer": 8,
"n_head": 8,
"n_embd": 512,
"block_size": 512,
"bias": false,
"vocab_size": 50304,
"dropout": 0.1,
"model_type": "gpt2",
"architectures": [
"GPT"
],
"tokenizer_class": "GPT2Tokenizer",
"model_name": "htmLLM 50M Base"
}