{ "architectures": [ "ChessForCausalLM" ], "auto_map": { "AutoConfig": "model.ChessConfig", "AutoModelForCausalLM": "model.ChessForCausalLM" }, "dtype": "float32", "hidden_size": 128, "model_type": "chess_transformer", "n_ctx": 256, "n_embd": 128, "n_head": 8, "n_inner": 256, "n_layer": 6, "num_attention_heads": 8, "num_hidden_layers": 6, "transformers_version": "4.57.3", "vocab_size": 1344 }