{
  "architectures": ["GPT2LMHeadModel"],
  "n_embd": 64,
  "n_head": 4,
  "n_layer": 4,
  "vocab_size": 61,
  "block_size": 32,
  "dropout": 0.1,
  "model_type": "gpt2"
}