MDaytek commited on
Commit
e07124f
·
verified ·
1 Parent(s): cdc3a6a

Micro test upload (fixed v3)

Browse files
Files changed (7) hide show
  1. README.md +8 -0
  2. config.json +14 -0
  3. model.py +68 -0
  4. pytorch_model.bin +3 -0
  5. tokenizer.py +41 -0
  6. train.py +73 -0
  7. vocab.json +511 -0
README.md ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ library_name: transformers
3
+ tags:
4
+ - chess
5
+ license: mit
6
+ ---
7
+ # Micro Test
8
+ This is just a test upload for MDaytek/Model-v3-mdaytek.
config.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "ChessForCausalLM"
4
+ ],
5
+ "dtype": "float32",
6
+ "model_type": "chess_lm",
7
+ "n_ctx": 256,
8
+ "n_embd": 128,
9
+ "n_head": 4,
10
+ "n_layer": 4,
11
+ "n_positions": 256,
12
+ "transformers_version": "4.57.3",
13
+ "vocab_size": 509
14
+ }
model.py ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import torch
3
+ import torch.nn as nn
4
+ from transformers import PreTrainedModel, PretrainedConfig
5
+
6
class ChessConfig(PretrainedConfig):
    """Hyper-parameter container for the chess language model.

    Registered under model_type "chess_lm"; every constructor argument is
    stored verbatim as an attribute before the transformers base-class
    initializer consumes the remaining keyword arguments.
    """

    model_type = "chess_lm"

    def __init__(
        self,
        vocab_size=1200,
        n_positions=256,
        n_embd=128,
        n_layer=4,
        n_head=4,
        n_ctx=256,
        tie_word_embeddings=True,
        **kwargs,
    ):
        # Record each hyper-parameter on the instance first; PretrainedConfig
        # only sees the leftover kwargs.
        for field, value in (
            ("vocab_size", vocab_size),
            ("n_positions", n_positions),
            ("n_embd", n_embd),
            ("n_layer", n_layer),
            ("n_head", n_head),
            ("n_ctx", n_ctx),
            ("tie_word_embeddings", tie_word_embeddings),
        ):
            setattr(self, field, value)
        super().__init__(**kwargs)
27
+
28
class ChessForCausalLM(PreTrainedModel):
    """Small decoder-style chess-move language model.

    Built from nn.TransformerEncoder layers made causal via an explicit
    upper-triangular -inf attention mask.  Token and absolute position
    embeddings are summed, run through the stack, layer-normed, and projected
    back to the vocabulary (weights tied to the token embedding by default).
    """
    config_class = ChessConfig

    def __init__(self, config):
        super().__init__(config)
        self.config = config
        # Learned token and absolute-position embedding tables.
        self.token_embedding = nn.Embedding(config.vocab_size, config.n_embd)
        self.position_embedding = nn.Embedding(config.n_positions, config.n_embd)
        # Pre-norm encoder layer; batch_first so inputs are (B, T, C).
        encoder_layer = nn.TransformerEncoderLayer(
            d_model=config.n_embd, nhead=config.n_head, dim_feedforward=config.n_embd * 4,
            batch_first=True, norm_first=True
        )
        self.blocks = nn.TransformerEncoder(encoder_layer, num_layers=config.n_layer)
        self.ln_f = nn.LayerNorm(config.n_embd)
        self.head = nn.Linear(config.n_embd, config.vocab_size, bias=False)
        if config.tie_word_embeddings:
            # Weight tying: the output projection shares the embedding matrix.
            self.head.weight = self.token_embedding.weight
        self.post_init()

    def get_input_embeddings(self): return self.token_embedding
    def set_input_embeddings(self, value): self.token_embedding = value

    def forward(self, input_ids, labels=None, **kwargs):
        # input_ids: (B, T) token ids; T must not exceed config.n_positions.
        # NOTE(review): any attention_mask passed by a collator lands in
        # **kwargs and is ignored — padding tokens attend normally.  Confirm
        # this is acceptable for the intended training setup.
        B, T = input_ids.shape
        tok_emb = self.token_embedding(input_ids)
        # (T, C) position embeddings broadcast over the batch dimension.
        pos_emb = self.position_embedding(torch.arange(T, device=input_ids.device))
        x = tok_emb + pos_emb
        # Causal mask: -inf strictly above the diagonal, 0 elsewhere, so each
        # position may only attend to itself and earlier positions.
        mask = torch.triu(torch.ones(T, T, device=input_ids.device) * float('-inf'), diagonal=1)
        x = self.blocks(x, mask=mask, is_causal=True)
        x = self.ln_f(x)
        logits = self.head(x)
        loss = None
        if labels is not None:
            # Next-token prediction: compare logits at t with labels at t+1.
            shift_logits = logits[..., :-1, :].contiguous()
            shift_labels = labels[..., 1:].contiguous()
            loss_fct = nn.CrossEntropyLoss()
            loss = loss_fct(shift_logits.view(-1, self.config.vocab_size), shift_labels.view(-1))
        # Returns (loss, logits) when labels are given, else the bare logits
        # tensor — not a transformers ModelOutput.  HF Trainer accepts the
        # loss-first tuple; generate()-style utilities may not accept the
        # bare-tensor form.
        return (loss, logits) if loss is not None else logits
66
+
67
def print_parameter_budget(config):
    """Print and return an analytic parameter-count estimate for the model.

    Fix: the original printed the literal placeholder "Model params: Check"
    (an f-string with no placeholders) and used nothing from ``config``.
    This version derives the count from the config:

    - token + position embeddings: (vocab_size + n_positions) * n_embd
    - each standard nn.TransformerEncoderLayer: 12*d^2 + 13*d
      (QKV + output projections with biases, two FFN linears with biases,
      two LayerNorms)
    - final LayerNorm: 2*d
    - LM head: free when weight-tied (the default); vocab_size*d otherwise

    Args:
        config: object exposing n_embd, vocab_size, n_positions, n_layer
            (and optionally tie_word_embeddings).

    Returns:
        The estimated total parameter count as an int.
    """
    d = config.n_embd
    embeddings = (config.vocab_size + config.n_positions) * d
    per_layer = 12 * d * d + 13 * d
    total = embeddings + config.n_layer * per_layer + 2 * d  # +2d: final LayerNorm
    if not getattr(config, "tie_word_embeddings", True):
        total += config.vocab_size * d  # untied LM head projection
    print(f"Model params (approx): {total:,}")
    return total
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:00546273a45b93e2b8d7f7d880cd22340ccb06f5239c718f8c6c3b6f5f540516
3
+ size 3581859
tokenizer.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import json
3
+ import os
4
+ import torch
5
+
6
class ChessTokenizer:
    """Minimal tokenizer over a fixed chess-move vocabulary.

    The vocabulary is a plain token -> id dict (one entry per move string or
    special token).  Special-token ids are looked up from the vocab with
    fixed fallbacks ([PAD]=0, [BOS]=1, [EOS]=2).
    """

    def __init__(self, vocab=None):
        # Token -> id mapping; empty dict when no vocab is supplied.
        self.vocab = vocab if vocab else {}
        self.id_to_token = {v: k for k, v in self.vocab.items()}
        self.pad_token_id = self.vocab.get("[PAD]", 0)
        self.bos_token_id = self.vocab.get("[BOS]", 1)
        self.eos_token_id = self.vocab.get("[EOS]", 2)

    @property
    def vocab_size(self):
        """Number of entries in the vocabulary."""
        return len(self.vocab)

    def _convert_token_to_id(self, token):
        """Map a token to its id, falling back to the [UNK] id.

        Returns None when the token is unknown and the vocab has no [UNK].
        """
        return self.vocab.get(token, self.vocab.get("[UNK]"))

    def pad(self, encoded_inputs, padding=True, max_length=None, pad_to_multiple_of=None, return_tensors=None):
        """Pad a batch of {"input_ids": [...]} dicts to a common length.

        Fixes: ``max_length`` and ``pad_to_multiple_of`` were previously
        accepted but silently ignored — they are now honored (the target
        length is the batch maximum, raised to at least ``max_length`` and
        rounded up to the next multiple).  An empty batch no longer raises
        ValueError from max() on an empty sequence.

        Returns a dict with a padded "input_ids" list of lists, or a
        torch.LongTensor when return_tensors == "pt".
        """
        batch_ids = [x["input_ids"] for x in encoded_inputs]
        target_len = max((len(ids) for ids in batch_ids), default=0)
        if max_length is not None:
            target_len = max(target_len, max_length)
        if pad_to_multiple_of is not None and target_len % pad_to_multiple_of:
            target_len += pad_to_multiple_of - target_len % pad_to_multiple_of
        padded_batch = [
            ids + [self.pad_token_id] * (target_len - len(ids)) for ids in batch_ids
        ]
        if return_tensors == "pt":
            return {"input_ids": torch.tensor(padded_batch, dtype=torch.long)}
        return {"input_ids": padded_batch}

    def save_pretrained(self, save_directory):
        """Write vocab.json into save_directory (created if needed)."""
        os.makedirs(save_directory, exist_ok=True)
        # Explicit utf-8 so the vocab round-trips regardless of locale.
        with open(os.path.join(save_directory, "vocab.json"), "w", encoding="utf-8") as f:
            json.dump(self.vocab, f, indent=4)

    @classmethod
    def from_pretrained(cls, load_directory):
        """Rebuild a tokenizer from vocab.json in load_directory."""
        with open(os.path.join(load_directory, "vocab.json"), "r", encoding="utf-8") as f:
            vocab = json.load(f)
        return cls(vocab)
train.py ADDED
@@ -0,0 +1,73 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import json
3
+ import os
4
+ from collections import Counter
5
+ from datasets import load_dataset, Dataset
6
+ from transformers import Trainer, TrainingArguments, DataCollatorForLanguageModeling
7
+ from src.tokenizer import ChessTokenizer
8
+ from src.model import ChessConfig, ChessForCausalLM, print_parameter_budget
9
+
10
def build_vocab(dataset, max_vocab=1200):
    """Build a token -> id vocabulary from a dataset of games.

    Each example's "text" field is whitespace-split into move tokens.  The
    four special tokens occupy ids 0-3; the remaining slots (up to
    ``max_vocab`` total) go to moves in descending frequency order.
    """
    freq = Counter(move for game in dataset for move in game["text"].split())
    specials = ["[PAD]", "[BOS]", "[EOS]", "[UNK]"]
    ordinary = [token for token, _ in freq.most_common(max_vocab - len(specials))]
    return {token: idx for idx, token in enumerate(specials + ordinary)}
19
+
20
def encode_game(game, tokenizer, max_len=256):
    """Encode one game as a fixed-length id sequence.

    Wraps the whitespace-split moves in [BOS]/[EOS], truncates to ``max_len``
    tokens (which may drop the [EOS] on long games), and right-pads with the
    tokenizer's pad id so the result is always exactly ``max_len`` long.
    """
    sequence = ["[BOS]"] + game["text"].split() + ["[EOS]"]
    del sequence[max_len:]
    encoded = [tokenizer._convert_token_to_id(token) for token in sequence]
    return encoded + [tokenizer.pad_token_id] * (max_len - len(encoded))
27
+
28
def main():
    """End-to-end micro training run: load 10 games, build a vocab,
    train the tiny chess LM for one epoch, and save it locally."""
    # Tiny slice (10 games) — this is a smoke-test run, not real training.
    print("Loading MICRO dataset...")
    raw_ds = load_dataset("dlouapre/lichess_2025-01_1M", split="train[:10]")

    print("Building tokenizer...")
    vocab = build_vocab(raw_ds)
    tokenizer = ChessTokenizer(vocab)

    # vocab_size is derived from the actual (small) vocab, not the
    # build_vocab max of 1200.
    config = ChessConfig(vocab_size=tokenizer.vocab_size)
    model = ChessForCausalLM(config)

    print("Tokenizing dataset...")
    # Each game becomes a fixed-length (256) id sequence.
    input_ids = [encode_game(g, tokenizer) for g in raw_ds]
    ds = Dataset.from_dict({"input_ids": input_ids})

    train_output = "./my_model"

    args = TrainingArguments(
        output_dir=train_output,
        per_device_train_batch_size=2,
        num_train_epochs=1,
        save_strategy="no",          # no mid-training checkpoints; saved manually below
        report_to="none",
        use_cpu=False
    )

    # NOTE(review): DataCollatorForLanguageModeling normally expects a
    # transformers PreTrainedTokenizerBase; ChessTokenizer only implements
    # pad() — confirm the collator code path used here needs nothing more.
    trainer = Trainer(
        model=model,
        args=args,
        train_dataset=ds,
        data_collator=DataCollatorForLanguageModeling(tokenizer, mlm=False),
    )

    print("🚀 Starting MICRO training...")
    trainer.train()

    print("💾 Saving model locally...")
    final_path = os.path.join(train_output, "final_model")

    # Fixed save: safe_serialization=False writes a pytorch_model.bin
    # (torch pickle) instead of safetensors.
    model.save_pretrained(final_path, safe_serialization=False)
    tokenizer.save_pretrained(final_path)
    print("✅ Training complete.")

if __name__ == "__main__":
    main()
vocab.json ADDED
@@ -0,0 +1,511 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "[PAD]": 0,
3
+ "[BOS]": 1,
4
+ "[EOS]": 2,
5
+ "[UNK]": 3,
6
+ "WPe2e4": 4,
7
+ "BNg8f6": 5,
8
+ "BKe8g8(o)": 6,
9
+ "BPg7g6": 7,
10
+ "WPd2d4": 8,
11
+ "WNg1f3": 9,
12
+ "WPh2h3": 10,
13
+ "BPd7d5": 11,
14
+ "BNb8c6": 12,
15
+ "WNb1c3": 13,
16
+ "BPe7e5": 14,
17
+ "WBf1e2": 15,
18
+ "BPc7c6": 16,
19
+ "WKe1g1(o)": 17,
20
+ "WNb1d2": 18,
21
+ "BPd7d6": 19,
22
+ "BPe7e6": 20,
23
+ "WPb2b3": 21,
24
+ "WPa2a4": 22,
25
+ "WBc1e3": 23,
26
+ "WPc2c3": 24,
27
+ "WPg2g3": 25,
28
+ "BPb7b5": 26,
29
+ "WPd2d3": 27,
30
+ "BBc8g4": 28,
31
+ "WBe2f3(x)": 29,
32
+ "BPb7b6": 30,
33
+ "BNc6e5(x)": 31,
34
+ "BNe5f3(x+)": 32,
35
+ "WPh3h4": 33,
36
+ "BPf7f6": 34,
37
+ "BPa7a5": 35,
38
+ "WPh4h5": 36,
39
+ "WKg1h2": 37,
40
+ "BPf6f5": 38,
41
+ "WBc1f4": 39,
42
+ "BBf8d6": 40,
43
+ "WPc3c4": 41,
44
+ "BNf6e4": 42,
45
+ "BPf7f5": 43,
46
+ "BPa7a6": 44,
47
+ "WBf1d3": 45,
48
+ "WNd2b3": 46,
49
+ "BNb8d7": 47,
50
+ "WPf2f4": 48,
51
+ "BBf8g7": 49,
52
+ "BBf8c5": 50,
53
+ "BNf6h5": 51,
54
+ "BNh5f6": 52,
55
+ "WQf3g3": 53,
56
+ "BNf6g4": 54,
57
+ "BPh7h5": 55,
58
+ "WPf2f3": 56,
59
+ "WNc3d5": 57,
60
+ "WPe4e5": 58,
61
+ "BBg4f3(x)": 59,
62
+ "BPc6c5": 60,
63
+ "WQd1e2": 61,
64
+ "BNg8e7": 62,
65
+ "BPb6c5(x)": 63,
66
+ "WQe2b5": 64,
67
+ "WBc1d2": 65,
68
+ "WRa1d1": 66,
69
+ "WPf2g3(x)": 67,
70
+ "BNh4g6": 68,
71
+ "BPe6e5": 69,
72
+ "BPd5d4": 70,
73
+ "BRf8e8": 71,
74
+ "WPg3g4": 72,
75
+ "BPf5f4": 73,
76
+ "BPe4e3": 74,
77
+ "WPg4g5": 75,
78
+ "BKg8h8": 76,
79
+ "WPd4e5(x)": 77,
80
+ "BQd8d6": 78,
81
+ "WKg1h1": 79,
82
+ "BRa8e8": 80,
83
+ "WRa1e1": 81,
84
+ "BPd5e4(x)": 82,
85
+ "WRe1e4(x)": 83,
86
+ "BPh7h6": 84,
87
+ "WPa4a5": 85,
88
+ "BKh7h6": 86,
89
+ "BBc8e6": 87,
90
+ "BRa8d8": 88,
91
+ "WRf1e1": 89,
92
+ "WQd1d3": 90,
93
+ "BQa5b5": 91,
94
+ "BNd7f6": 92,
95
+ "WRh1e1": 93,
96
+ "BRf8d8": 94,
97
+ "WPh5g6(x)": 95,
98
+ "BPh7g6(x)": 96,
99
+ "WPc2c4": 97,
100
+ "BPc7c5": 98,
101
+ "WPd4d5": 99,
102
+ "BBc8d7": 100,
103
+ "WPa2a3": 101,
104
+ "WPb2b4": 102,
105
+ "BQd8e8": 103,
106
+ "WPe4f5(x)": 104,
107
+ "BPe6f5(x)": 105,
108
+ "WBf4e3": 106,
109
+ "BKg8g7": 107,
110
+ "BKg7h6": 108,
111
+ "BKh6h5": 109,
112
+ "WPg2g4": 110,
113
+ "WQd1h5": 111,
114
+ "WQh5e5(x+)": 112,
115
+ "BBc5e7": 113,
116
+ "WQe5f4": 114,
117
+ "WQf4f3": 115,
118
+ "WBe2g4(x)": 116,
119
+ "BBc8g4(x)": 117,
120
+ "BPh5h4": 118,
121
+ "WQg3g4(x)": 119,
122
+ "BBe7f8": 120,
123
+ "WBc1g5": 121,
124
+ "BQd8d7": 122,
125
+ "WNd5f6(+)": 123,
126
+ "BNf6g8": 124,
127
+ "WPd4c5(x)": 125,
128
+ "BBf8c5(x)": 126,
129
+ "WNc3a4": 127,
130
+ "WNa4c5(x)": 128,
131
+ "WQb5c5(x)": 129,
132
+ "WQc5c3": 130,
133
+ "WQc3f3(x)": 131,
134
+ "BNe7f5": 132,
135
+ "BRa8b8": 133,
136
+ "WBd2c3": 134,
137
+ "BQd8g5": 135,
138
+ "BNf5h4": 136,
139
+ "BQg5g3(x)": 137,
140
+ "WRd1d4": 138,
141
+ "BRb8b5": 139,
142
+ "WRd4g4": 140,
143
+ "WBc3d2": 141,
144
+ "BRb5c5": 142,
145
+ "WRf1c1": 143,
146
+ "BNg6f8": 144,
147
+ "WPh5h6": 145,
148
+ "WRg4h4": 146,
149
+ "BPe5e4": 147,
150
+ "WBd2f4": 148,
151
+ "BPd4d3": 149,
152
+ "WBf4d6": 150,
153
+ "BRc5c2(x)": 151,
154
+ "WRc1c2(x)": 152,
155
+ "BPd3c2(x)": 153,
156
+ "WBd6f4": 154,
157
+ "BNf8e6": 155,
158
+ "WBf4c1": 156,
159
+ "BNe6d4": 157,
160
+ "BNd4e2": 158,
161
+ "WBe3d2": 159,
162
+ "WBd2e3(x)": 160,
163
+ "BPf4e3(x)": 161,
164
+ "BPc2c1(Q)": 162,
165
+ "WRh4c4": 163,
166
+ "BQc1g1(+)": 164,
167
+ "WKh2h3": 165,
168
+ "BRe8f8": 166,
169
+ "WRc4c7": 167,
170
+ "BNe2f4(+)": 168,
171
+ "WKh3g3": 169,
172
+ "BQg1e1(+)": 170,
173
+ "WKg3h2": 171,
174
+ "BQe1h4(+)": 172,
175
+ "WKh2g1": 173,
176
+ "BQh4g5(x)": 174,
177
+ "WRc7g7(+)": 175,
178
+ "WRg7h7(x+)": 176,
179
+ "BKh8h7(x)": 177,
180
+ "WKg1f1": 178,
181
+ "BQg5h6(x)": 179,
182
+ "WPe2f3(x)": 180,
183
+ "WBf4e5(x)": 181,
184
+ "BBd6e5(x)": 182,
185
+ "BBe5h2": 183,
186
+ "BBh2g3(x)": 184,
187
+ "BQd6g3(x)": 185,
188
+ "WQd1e1": 186,
189
+ "BQg3h3(x+)": 187,
190
+ "WKh1g1": 188,
191
+ "WQe1f2": 189,
192
+ "BRe8e6": 190,
193
+ "WPf3e4(x)": 191,
194
+ "BRe6f6": 192,
195
+ "WQf2g2": 193,
196
+ "BRf6f1(x+)": 194,
197
+ "WBe2f1(x)": 195,
198
+ "BQh3g2(x+)": 196,
199
+ "WBf1g2(x)": 197,
200
+ "BRe8d8": 198,
201
+ "WNd2f3": 199,
202
+ "BRd8d3": 200,
203
+ "WPc4b5(x)": 201,
204
+ "BRd3d5": 202,
205
+ "BRd5h5(+)": 203,
206
+ "WKh2g3": 204,
207
+ "WRe4e5": 205,
208
+ "WKg3f4": 206,
209
+ "BPg6g5(+)": 207,
210
+ "WKf4f5(x)": 208,
211
+ "BPg5g4(+)": 209,
212
+ "WKf5f6": 210,
213
+ "BPg4f3(x)": 211,
214
+ "WRe5h5(x)": 212,
215
+ "BKg8h7": 213,
216
+ "WBg2f3(x)": 214,
217
+ "WRh5h1": 215,
218
+ "BPc6b5(x)": 216,
219
+ "WRh1g1": 217,
220
+ "BPb5a4(x)": 218,
221
+ "WPb3a4(x)": 219,
222
+ "BPh6h5": 220,
223
+ "WBf3h5(x)": 221,
224
+ "WBh5e2": 222,
225
+ "BKh6h7": 223,
226
+ "WBe2d1": 224,
227
+ "WRg1h1(+*)": 225,
228
+ "WPe4d5(x)": 226,
229
+ "BPc6d5(x)": 227,
230
+ "WBd3e2": 228,
231
+ "BQd8c7": 229,
232
+ "WNb3d4": 230,
233
+ "WRe1e6(x)": 231,
234
+ "BPf7e6(x)": 232,
235
+ "WNd4e6(x)": 233,
236
+ "BQc7a5": 234,
237
+ "WNe6f8(x)": 235,
238
+ "BRd8f8(x)": 236,
239
+ "WBe3d4": 237,
240
+ "BBd6c5": 238,
241
+ "BBc5d4(x)": 239,
242
+ "WQd3d4(x)": 240,
243
+ "BQb5b2(x)": 241,
244
+ "WQd4d5(x+)": 242,
245
+ "WBf3e4(x)": 243,
246
+ "BQb2f2(x+)": 244,
247
+ "BQf2g3": 245,
248
+ "WQd5e6": 246,
249
+ "WQe6g6": 247,
250
+ "BQg3g6(x)": 248,
251
+ "WBe4g6(x)": 249,
252
+ "BRf8f2": 250,
253
+ "WPd3e4(x)": 251,
254
+ "BQd8d1(x+)": 252,
255
+ "WKe1d1(x)": 253,
256
+ "WKd1e2": 254,
257
+ "BNf6e4(x)": 255,
258
+ "BNe4d2(x)": 256,
259
+ "WNf3d2(x)": 257,
260
+ "WBf1g2": 258,
261
+ "BNf6d5": 259,
262
+ "WKe2d1": 260,
263
+ "BNd5e3(x+)": 261,
264
+ "WRe1e3(x)": 262,
265
+ "BBc8b7": 263,
266
+ "WNd2e4": 264,
267
+ "BBd6e7": 265,
268
+ "WKd1c1": 266,
269
+ "WNe4g5": 267,
270
+ "BBe7g5(x)": 268,
271
+ "WPf4g5(x)": 269,
272
+ "WPh2h4": 270,
273
+ "BKg8f8": 271,
274
+ "BKf8e7": 272,
275
+ "BRd8d4": 273,
276
+ "WBg2f3": 274,
277
+ "BRd8d7": 275,
278
+ "WBf3b7(x)": 276,
279
+ "BRd7b7(x)": 277,
280
+ "WRe3e4": 278,
281
+ "BRb7d7": 279,
282
+ "WKc1b2": 280,
283
+ "BRd4e4(x)": 281,
284
+ "BRd7d5": 282,
285
+ "BRd5g5(x)": 283,
286
+ "WPa5b6(x)": 284,
287
+ "BPa7b6(x)": 285,
288
+ "WRe4a4": 286,
289
+ "WPg4f5(x)": 287,
290
+ "BPg6f5(x)": 288,
291
+ "WKb2c1": 289,
292
+ "BRg5g4": 290,
293
+ "WRa4a6": 291,
294
+ "BRg4b4": 292,
295
+ "WKc1d2": 293,
296
+ "WKd2d3": 294,
297
+ "BPe5e4(+)": 295,
298
+ "WKd3e3": 296,
299
+ "BKe7e6": 297,
300
+ "WRa6a8": 298,
301
+ "BPc5c4": 299,
302
+ "WPb3c4(x)": 300,
303
+ "BRb4c4(x)": 301,
304
+ "WRa8e8(+)": 302,
305
+ "BKe6f7": 303,
306
+ "WRe8b8": 304,
307
+ "BRc4c3(+)": 305,
308
+ "WKe3f4": 306,
309
+ "BRc3f3(+)": 307,
310
+ "WKf4e5": 308,
311
+ "WRb8b6(x)": 309,
312
+ "BPe3e2": 310,
313
+ "WRb6b7(+)": 311,
314
+ "BKf7e8": 312,
315
+ "WKe5f6": 313,
316
+ "BKe8d8": 314,
317
+ "WRb7e7": 315,
318
+ "BRf3f2": 316,
319
+ "WRe7e5": 317,
320
+ "WKf6f5": 318,
321
+ "BPf4f3": 319,
322
+ "WKf5f4": 320,
323
+ "BRf2f1": 321,
324
+ "BPe2e1(Q)": 322,
325
+ "WRe5e1(x)": 323,
326
+ "BRf1e1(x)": 324,
327
+ "WKf4f3(x)": 325,
328
+ "BKd8d7": 326,
329
+ "BBg7c3(x+)": 327,
330
+ "WPb2c3(x)": 328,
331
+ "BNd7e5": 329,
332
+ "WBe3h6": 330,
333
+ "BQd8a5": 331,
334
+ "BBc8a6": 332,
335
+ "WQd3d2": 333,
336
+ "BKe8c8(O)": 334,
337
+ "BPb5b4": 335,
338
+ "WRf1b1": 336,
339
+ "BPb4c3(x)": 337,
340
+ "WQd2f4": 338,
341
+ "BKc8d7": 339,
342
+ "WBf3g4(+)": 340,
343
+ "BKd7e8": 341,
344
+ "WBh6g7": 342,
345
+ "BRh8g8": 343,
346
+ "WBg7f6(x)": 344,
347
+ "BPe7f6(x)": 345,
348
+ "WQf4f6(x)": 346,
349
+ "WRb1b5(x)": 347,
350
+ "BBa6b5(x)": 348,
351
+ "WPe2e3": 349,
352
+ "WNg1e2": 350,
353
+ "BBd7c6": 351,
354
+ "WQd1c2": 352,
355
+ "WNb3a5": 353,
356
+ "BBc6d5": 354,
357
+ "BBd5c6": 355,
358
+ "WNa5c6(x)": 356,
359
+ "BNb8c6(x)": 357,
360
+ "BNc6b8": 358,
361
+ "WRa1c1": 359,
362
+ "WPe3e4": 360,
363
+ "BQe8f7": 361,
364
+ "WNe2g3": 362,
365
+ "WRe1e2": 363,
366
+ "WPd5c6(x)": 364,
367
+ "BNe7c6(x)": 365,
368
+ "WRc1e1": 366,
369
+ "BNc6d4": 367,
370
+ "WQc2d2": 368,
371
+ "BNd4e2(x+)": 369,
372
+ "WRe1e2(x)": 370,
373
+ "BRe8e2(x)": 371,
374
+ "WNg3e2(x)": 372,
375
+ "WNe2d4": 373,
376
+ "WBd3e4(x)": 374,
377
+ "BPf5e4(x)": 375,
378
+ "BPd6d5": 376,
379
+ "WPc4c5": 377,
380
+ "WPb4c5(x)": 378,
381
+ "BRd8c8": 379,
382
+ "WPc5c6": 380,
383
+ "BBg7d4(x)": 381,
384
+ "WBe3d4(x)": 382,
385
+ "BRc8c6(x)": 383,
386
+ "WPa3a4": 384,
387
+ "BRc6c4": 385,
388
+ "WBd4b6": 386,
389
+ "BRc4a4(x)": 387,
390
+ "WQd2c2": 388,
391
+ "BRa4a1(+)": 389,
392
+ "BQf7f4(+)": 390,
393
+ "BQf4f3": 391,
394
+ "WQc2c8(+)": 392,
395
+ "WBb6d4(+)": 393,
396
+ "WBd4e3(+)": 394,
397
+ "WPg3g4(+)": 395,
398
+ "BKh5h4": 396,
399
+ "WQc8d8(+)": 397,
400
+ "WQd1d2": 398,
401
+ "WKe1c1(O)": 399,
402
+ "WBe3g5": 400,
403
+ "WBg5h4": 401,
404
+ "BNg4h6": 402,
405
+ "BNh6f7": 403,
406
+ "WBd3c4": 404,
407
+ "BQe8d8": 405,
408
+ "WBh4g3": 406,
409
+ "BNc6a5": 407,
410
+ "WBc4d3": 408,
411
+ "BNa5c6": 409,
412
+ "BNc6b4": 410,
413
+ "WRe1h1": 411,
414
+ "BNb4d3(x+)": 412,
415
+ "WQd2d3(x)": 413,
416
+ "BNf7g5(x)": 414,
417
+ "WBg3h4": 415,
418
+ "BBg7f6": 416,
419
+ "WBh4g5(x)": 417,
420
+ "BBf6g5(x+)": 418,
421
+ "WKc1b1": 419,
422
+ "BQd8f6": 420,
423
+ "WQd3c4(+)": 421,
424
+ "WQc4c7(x)": 422,
425
+ "BQf6f7": 423,
426
+ "WNf3g5(x)": 424,
427
+ "BQf7e7": 425,
428
+ "WRh1h7(+)": 426,
429
+ "BKg7f6": 427,
430
+ "WRh7e7(x)": 428,
431
+ "BKf6e7(x)": 429,
432
+ "WRd1e1(+)": 430,
433
+ "BKe7f6": 431,
434
+ "WNg5h7(+)": 432,
435
+ "BKf6g7": 433,
436
+ "WNh7f8(x)": 434,
437
+ "BRa8f8(x)": 435,
438
+ "WQc7d7(x+)": 436,
439
+ "BRf8f7": 437,
440
+ "WQd7d6(x)": 438,
441
+ "BRf7f6": 439,
442
+ "BRf6d6(x)": 440,
443
+ "WKb1c2": 441,
444
+ "BPa5a4": 442,
445
+ "WKc2c3": 443,
446
+ "WNd5e3": 444,
447
+ "BPa4b3(x)": 445,
448
+ "WRe1h1(+*)": 446,
449
+ "BPe5f4(x)": 447,
450
+ "BBf8e7": 448,
451
+ "BBe7h4(+)": 449,
452
+ "WKe1e2": 450,
453
+ "WBc1f4(x)": 451,
454
+ "BBg4f3(x+)": 452,
455
+ "WKe2f3(x)": 453,
456
+ "BBh4g5": 454,
457
+ "BBg5f4(x)": 455,
458
+ "WPg3f4(x)": 456,
459
+ "BNg8h6": 457,
460
+ "WBf1h3": 458,
461
+ "BNc6e7": 459,
462
+ "BNe7g6": 460,
463
+ "BNg6h4(x+)": 461,
464
+ "WKf3g3": 462,
465
+ "WNc3d5(x)": 463,
466
+ "BQd8h4(+)": 464,
467
+ "WKg3f3": 465,
468
+ "BQh4h5(+)": 466,
469
+ "WKf3e3": 467,
470
+ "BQh5h4": 468,
471
+ "WNd5c7(x)": 469,
472
+ "BQh4g3(+)": 470,
473
+ "WKe3d2": 471,
474
+ "BQg3f4(x+)": 472,
475
+ "WKd2c3": 473,
476
+ "BRa8c8": 474,
477
+ "WQb5b7(x)": 475,
478
+ "BQf4e3(+)": 476,
479
+ "WKc3c4": 477,
480
+ "BQe3e2(+)": 478,
481
+ "WKc4c5": 479,
482
+ "BQe2c2(x+)": 480,
483
+ "WKc5d6": 481,
484
+ "BRf8d8(+)": 482,
485
+ "WKd6e6": 483,
486
+ "BRc8c7(x)": 484,
487
+ "WQb7b3": 485,
488
+ "BQc2c6(+*)": 486,
489
+ "BNc6b4(x)": 487,
490
+ "WPa3b4(x)": 488,
491
+ "BBc5b4(x+)": 489,
492
+ "BBb4c5": 490,
493
+ "BBe6d7": 491,
494
+ "WKe1f2": 492,
495
+ "WKf2g3": 493,
496
+ "BPh5h4(+)": 494,
497
+ "WKg3f2": 495,
498
+ "BNf6h7": 496,
499
+ "WBd2c1": 497,
500
+ "BPg7g5": 498,
501
+ "WBc1a3": 499,
502
+ "WBa3c5(x)": 500,
503
+ "BQd6c5(x)": 501,
504
+ "BQc5b6": 502,
505
+ "WRa1b1": 503,
506
+ "WNb3d4(x)": 504,
507
+ "BQb6d6": 505,
508
+ "WNd4b5": 506,
509
+ "BQd6d4(+)": 507,
510
+ "WNb5d4(x)": 508
511
+ }