{
"architectures": ["GPT2LMHeadModel"],
"model_type": "gpt2",
"vocab_size": 1200,
"n_positions": 256,
"n_ctx": 256,
"n_embd": 80,
"n_layer": 3,
"n_head": 4
}