Sammy972 committed on
Commit
c618974
·
verified ·
1 Parent(s): a846d1f

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +12 -10
config.json CHANGED
@@ -1,20 +1,22 @@
1
  {
2
- "architectures": [
3
- "ChessForCausalLM"
4
- ],
5
- "bos_token_id": 1,
6
- "dropout": 0.1,
7
- "dtype": "float32",
8
- "eos_token_id": 2,
9
- "layer_norm_epsilon": 1e-05,
10
  "model_type": "chess_transformer",
 
11
  "n_ctx": 512,
12
  "n_embd": 100,
13
  "n_head": 5,
14
  "n_inner": 384,
15
  "n_layer": 8,
 
 
 
 
16
  "pad_token_id": 0,
17
  "tie_weights": true,
18
- "transformers_version": "4.57.3",
19
- "vocab_size": 85
20
  }
 
1
  {
2
+ "architectures": ["ChessForCausalLM"],
3
+ "auto_map": {
4
+ "AutoConfig": "model.ChessConfig",
5
+ "AutoModelForCausalLM": "model.ChessForCausalLM"
6
+ },
 
 
 
7
  "model_type": "chess_transformer",
8
+ "vocab_size": 85,
9
  "n_ctx": 512,
10
  "n_embd": 100,
11
  "n_head": 5,
12
  "n_inner": 384,
13
  "n_layer": 8,
14
+ "dropout": 0.1,
15
+ "layer_norm_epsilon": 1e-05,
16
+ "bos_token_id": 1,
17
+ "eos_token_id": 2,
18
  "pad_token_id": 0,
19
  "tie_weights": true,
20
+ "torch_dtype": "float32",
21
+ "transformers_version": "4.57.3"
22
  }