{
  "architectures": [
    "ChessForCausalLM"
  ],
  "dtype": "float32",
  "model_type": "chess_transformer",
  "n_ctx": 256,
  "n_embd": 128,
  "n_head": 4,
  "n_inner": 512,
  "n_layer": 4,
  "transformers_version": "4.57.3",
  "vocab_size": 679
}