{
  "architectures": [
    "ChessForCausalLM"
  ],
  "dtype": "float32",
  "model_type": "chess_transformer",
  "n_ctx": 256,
  "n_embd": 128,
  "n_head": 4,
  "n_inner": 512,
  "n_layer": 4,
  "transformers_version": "4.57.3",
  "vocab_size": 679
}