Eithannak committed on
Commit
24b3631
·
verified ·
1 Parent(s): 9a956b2

Chess Challenge submission by Eithannak

Browse files
Files changed (7) hide show
  1. README.md +26 -0
  2. config.json +21 -0
  3. pytorch_model.bin +3 -0
  4. special_tokens_map.json +6 -0
  5. tokenizer.py +154 -0
  6. tokenizer_config.json +47 -0
  7. vocab.json +514 -0
README.md ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ library_name: transformers
3
+ tags:
4
+ - chess
5
+ - llm-course
6
+ - chess-challenge
7
+ license: mit
8
+ ---
9
+
10
+ # chess-model-eithan-nakache-v3
11
+
12
+ Chess model submitted to the LLM Course Chess Challenge.
13
+
14
+ ## Submission Info
15
+
16
+ - **Submitted by**: [Eithannak](https://huggingface.co/Eithannak)
17
+ - **Parameters**: 965,040
18
+ - **Organization**: LLM-course
19
+
20
+ ## Model Details
21
+
22
+ - **Architecture**: Chess Transformer (GPT-style)
23
+ - **Vocab size**: 512
24
+ - **Embedding dim**: 120
25
+ - **Layers**: 6
26
+ - **Heads**: 4
config.json ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "ChessForCausalLM"
4
+ ],
5
+ "bos_token_id": 1,
6
+ "dropout": 0.1,
7
+ "dtype": "float32",
8
+ "eos_token_id": 2,
9
+ "layer_norm_epsilon": 1e-05,
10
+ "model_type": "chess_transformer",
11
+ "n_ctx": 256,
12
+ "n_embd": 120,
13
+ "n_head": 4,
14
+ "n_inner": 360,
15
+ "n_layer": 6,
16
+ "pad_token_id": 0,
17
+ "tie_weights": true,
18
+ "transformers_version": "4.57.5",
19
+ "unk_token_id": 3,
20
+ "vocab_size": 512
21
+ }
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b8ee69ac57bb9c15cff3921b710f4b63230e178ce1d73a156fcea9b3e45e00de
3
+ size 3881771
special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": "[BOS]",
3
+ "eos_token": "[EOS]",
4
+ "pad_token": "[PAD]",
5
+ "unk_token": "[UNK]"
6
+ }
tokenizer.py ADDED
@@ -0,0 +1,154 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+ import json
3
+ import os
4
+ import shutil
5
+ import re
6
+
7
+ from collections import Counter
8
+ from datasets import load_dataset
9
+ from typing import Dict, List, Optional
10
+ from transformers import PreTrainedTokenizer
11
+
12
# A move is two board squares (file a-h, rank 1-8): origin then destination.
SQUARE_MOVE_PATTERN = re.compile(r"([a-h][1-8])([a-h][1-8])")
# Optional pawn-promotion marker, e.g. "=Q".
PROMOTION_PATTERN = re.compile(r"=([NBRQ])")


def normalize_move(token: str) -> str:
    """Canonicalize a raw move token into ``<piece><from><to>[=P]`` form.

    Special tokens (anything starting with ``[``) and tokens that contain
    no recognizable from/to square pair are returned unchanged.
    """
    # Special tokens such as "[BOS]" pass through untouched.
    if token.startswith("["):
        return token

    squares = SQUARE_MOVE_PATTERN.search(token)
    if squares is None:
        # Not a board move — leave the token as-is.
        return token

    # Promotion suffix, if any ("=N" / "=B" / "=R" / "=Q").
    promo = PROMOTION_PATTERN.search(token)
    suffix = "=" + promo.group(1) if promo else ""

    # The first two characters are taken as the piece prefix (e.g. "WP");
    # "WP" is the fallback for degenerate short tokens.
    prefix = token[:2] if len(token) >= 2 else "WP"

    # Drop any trailing annotations (checks, captures, "!?" marks, ...).
    return prefix + squares.group(1) + squares.group(2) + suffix
34
+
35
+
36
+
37
class ChessTokenizer(PreTrainedTokenizer):
    """Whitespace tokenizer over normalized chess-move tokens.

    Each move string (e.g. ``WPe2e4``) is one vocabulary entry; the vocab
    is a plain ``{token: id}`` mapping stored in ``vocab.json``.
    """

    model_input_names = ["input_ids", "attention_mask"]
    vocab_files_names = {"vocab_file": "vocab.json"}

    # Special tokens; their ids come from their position in the vocab
    # (the default vocab assigns 0..3 in this order).
    PAD_TOKEN = "[PAD]"
    BOS_TOKEN = "[BOS]"
    EOS_TOKEN = "[EOS]"
    UNK_TOKEN = "[UNK]"

    def __init__(self, vocab_file=None, vocab=None, **kwargs):
        # Set the private special-token attributes before super().__init__
        # runs — presumably the base class reads them during setup
        # (NOTE(review): depends on transformers internals; confirm).
        self._pad_token = self.PAD_TOKEN
        self._bos_token = self.BOS_TOKEN
        self._eos_token = self.EOS_TOKEN
        self._unk_token = self.UNK_TOKEN

        # Drop any caller-supplied special tokens so the class-level ones
        # passed to super().__init__ below always win (avoids duplicates).
        for t in ["pad_token", "bos_token", "eos_token", "unk_token"]:
            kwargs.pop(t, None)

        if vocab is None:
            # No in-memory vocab: load from file, defaulting to the
            # vocab.json shipped next to this module.
            if vocab_file is None:
                vocab_file = os.path.join(os.path.dirname(__file__), "vocab.json")
            self.vocab_file = vocab_file
            if os.path.exists(vocab_file):
                with open(vocab_file, "r", encoding="utf-8") as f:
                    self._vocab = json.load(f)
            else:
                # Fall back to a minimal specials-only vocab.
                self._vocab = self._create_default_vocab()
        else:
            self._vocab = vocab
            self.vocab_file = vocab_file

        # Reverse mapping id -> token for decoding.
        self._ids_to_tokens = {v: k for k, v in self._vocab.items()}
        super().__init__(
            pad_token=self.PAD_TOKEN,
            bos_token=self.BOS_TOKEN,
            eos_token=self.EOS_TOKEN,
            unk_token=self.UNK_TOKEN,
            **kwargs,
        )

    def save_pretrained(self, save_directory: str, **kwargs):
        """Save tokenizer files, plus this module and an auto_map entry.

        Copying ``tokenizer.py`` into the save directory and registering
        ``auto_map`` lets ``AutoTokenizer.from_pretrained`` (with remote
        code enabled) reconstruct this custom class from the hub.
        """
        super().save_pretrained(save_directory, **kwargs)
        # Ship this source file alongside the saved tokenizer.
        src_path = os.path.abspath(__file__)
        dst_path = os.path.join(save_directory, "tokenizer.py")
        if src_path != dst_path:
            shutil.copy(src_path, dst_path)

        # Inject the auto_map so AutoTokenizer can locate the class.
        config_path = os.path.join(save_directory, "tokenizer_config.json")
        if os.path.exists(config_path):
            with open(config_path, "r") as f:
                cfg = json.load(f)
            cfg["auto_map"] = {"AutoTokenizer": "tokenizer.ChessTokenizer"}
            with open(config_path, "w") as f:
                json.dump(cfg, f, indent=2)

    def _create_default_vocab(self):
        # Minimal vocab: just the four special tokens, ids 0..3.
        return {
            t: i
            for i, t in enumerate([self.PAD_TOKEN, self.BOS_TOKEN, self.EOS_TOKEN, self.UNK_TOKEN])
        }

    @classmethod
    def build_vocab_from_dataset(
        cls,
        dataset_name,
        split="train",
        column="text",
        max_vocab_size=512,
        min_frequency=500,  # NOTE(review): currently unused — no frequency cutoff is applied
        max_samples=100000,
    ):
        """Build a tokenizer whose vocab is the most frequent moves.

        Streams up to ``max_samples`` examples from ``dataset_name``,
        normalizes each whitespace-separated move, and keeps the
        ``max_vocab_size - 4`` most common tokens after the specials.
        """
        # Streaming avoids downloading the full dataset.
        ds = load_dataset(dataset_name, split=split, streaming=True)
        ds = ds.take(max_samples)

        counter = Counter()
        for ex in ds:
            moves = [normalize_move(t) for t in ex[column].split()]
            counter.update(moves)

        # Specials occupy the first ids; frequent moves fill the rest.
        special = [cls.PAD_TOKEN, cls.BOS_TOKEN, cls.EOS_TOKEN, cls.UNK_TOKEN]
        most_common = counter.most_common(max_vocab_size - len(special))

        vocab = {t: i for i, t in enumerate(special + [t for t, c in most_common])}
        return cls(vocab=vocab)

    @property
    def vocab_size(self):
        # Number of entries in the token -> id mapping.
        return len(self._vocab)

    def get_vocab(self):
        # Return a copy so callers cannot mutate internal state.
        return dict(self._vocab)

    def _tokenize(self, text):
        # Split on whitespace and canonicalize each move token.
        return [normalize_move(t) for t in text.strip().split()]

    def _convert_token_to_id(self, token):
        # Unknown tokens map to the [UNK] id.
        return self._vocab.get(token, self._vocab.get(self.UNK_TOKEN))

    def _convert_id_to_token(self, index):
        # Out-of-range ids decode to [UNK].
        return self._ids_to_tokens.get(index, self.UNK_TOKEN)

    def convert_tokens_to_string(self, tokens):
        # Join moves with spaces, dropping all special tokens.
        return " ".join(
            t
            for t in tokens
            if t not in [self.PAD_TOKEN, self.BOS_TOKEN, self.EOS_TOKEN, self.UNK_TOKEN]
        )

    def save_vocabulary(self, save_directory, filename_prefix=None):
        """Write the vocab to ``[prefix-]vocab.json``; return its path as a 1-tuple."""
        if not os.path.isdir(save_directory):
            os.makedirs(save_directory, exist_ok=True)
        path = os.path.join(
            save_directory, (filename_prefix + "-" if filename_prefix else "") + "vocab.json"
        )
        with open(path, "w", encoding="utf-8") as f:
            json.dump(self._vocab, f, ensure_ascii=False, indent=2)
        return (path,)
tokenizer_config.json ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "[PAD]",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "1": {
12
+ "content": "[BOS]",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "2": {
20
+ "content": "[EOS]",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "3": {
28
+ "content": "[UNK]",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ }
35
+ },
36
+ "auto_map": {
37
+ "AutoTokenizer": "tokenizer.ChessTokenizer"
38
+ },
39
+ "bos_token": "[BOS]",
40
+ "clean_up_tokenization_spaces": false,
41
+ "eos_token": "[EOS]",
42
+ "extra_special_tokens": {},
43
+ "model_max_length": 1000000000000000019884624838656,
44
+ "pad_token": "[PAD]",
45
+ "tokenizer_class": "ChessTokenizer",
46
+ "unk_token": "[UNK]"
47
+ }
vocab.json ADDED
@@ -0,0 +1,514 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "[PAD]": 0,
3
+ "[BOS]": 1,
4
+ "[EOS]": 2,
5
+ "[UNK]": 3,
6
+ "WNg1f3": 4,
7
+ "BNg8f6": 5,
8
+ "WPe2e4": 6,
9
+ "WPd2d4": 7,
10
+ "WNb1c3": 8,
11
+ "WKe1g1": 9,
12
+ "BNb8c6": 10,
13
+ "BKe8g8": 11,
14
+ "BPd7d5": 12,
15
+ "BPe7e6": 13,
16
+ "BPe7e5": 14,
17
+ "BPd7d6": 15,
18
+ "WPc2c3": 16,
19
+ "WPh2h3": 17,
20
+ "BPg7g6": 18,
21
+ "BPc7c6": 19,
22
+ "BPh7h6": 20,
23
+ "BPc7c5": 21,
24
+ "BPa7a6": 22,
25
+ "WPc2c4": 23,
26
+ "WNf3e5": 24,
27
+ "BBf8e7": 25,
28
+ "WPa2a3": 26,
29
+ "WPg2g3": 27,
30
+ "WPe2e3": 28,
31
+ "BNb8d7": 29,
32
+ "WPf2f4": 30,
33
+ "WBf1c4": 31,
34
+ "WRf1e1": 32,
35
+ "WPd2d3": 33,
36
+ "WNb1d2": 34,
37
+ "WPe4e5": 35,
38
+ "BPb7b6": 36,
39
+ "WBf1d3": 37,
40
+ "BNf6e4": 38,
41
+ "BPf7f6": 39,
42
+ "WPb2b3": 40,
43
+ "BPb7b5": 41,
44
+ "WBc1g5": 42,
45
+ "WBc1e3": 43,
46
+ "WPf2f3": 44,
47
+ "BBc8g4": 45,
48
+ "WPe4d5": 46,
49
+ "BRf8e8": 47,
50
+ "BBf8g7": 48,
51
+ "WBf1e2": 49,
52
+ "BPf7f5": 50,
53
+ "WNf3d4": 51,
54
+ "WPb2b4": 52,
55
+ "WBc1f4": 53,
56
+ "BPc5d4": 54,
57
+ "BBc8b7": 55,
58
+ "BNf6d5": 56,
59
+ "BNc6d4": 57,
60
+ "WNc3d5": 58,
61
+ "WPg2g4": 59,
62
+ "WPa2a4": 60,
63
+ "BBf8d6": 61,
64
+ "BBc8d7": 62,
65
+ "WPh2h4": 63,
66
+ "BPa7a5": 64,
67
+ "WPd4d5": 65,
68
+ "BBc8e6": 66,
69
+ "WPd4e5": 67,
70
+ "WRa1d1": 68,
71
+ "BRa8c8": 69,
72
+ "BBf8c5": 70,
73
+ "BNc6e5": 71,
74
+ "WQd1e2": 72,
75
+ "BNg8e7": 73,
76
+ "WNc3e4": 74,
77
+ "BPh7h5": 75,
78
+ "BBc8f5": 76,
79
+ "BRa8d8": 77,
80
+ "BPe6e5": 78,
81
+ "BQd8e7": 79,
82
+ "WRa1c1": 80,
83
+ "WNf3g5": 81,
84
+ "BPe5d4": 82,
85
+ "WQd1f3": 83,
86
+ "BPd5e4": 84,
87
+ "BPc6c5": 85,
88
+ "WBf1b5": 86,
89
+ "WKe1c1": 87,
90
+ "WKg1h1": 88,
91
+ "BPg7g5": 89,
92
+ "WQd1d2": 90,
93
+ "WBc1d2": 91,
94
+ "BBf8b4": 92,
95
+ "WBf1g2": 93,
96
+ "BKg8h8": 94,
97
+ "WPc4d5": 95,
98
+ "BKg8g7": 96,
99
+ "WBc1b2": 97,
100
+ "BPe5e4": 98,
101
+ "BRa8b8": 99,
102
+ "WPh4h5": 100,
103
+ "BKe8c8": 101,
104
+ "BPe6d5": 102,
105
+ "BPd5d4": 103,
106
+ "BPd6d5": 104,
107
+ "BQd8c7": 105,
108
+ "WPc3c4": 106,
109
+ "WRa1b1": 107,
110
+ "BQd8f6": 108,
111
+ "BPb5b4": 109,
112
+ "BPc6d5": 110,
113
+ "WKg1g2": 111,
114
+ "BNf6g4": 112,
115
+ "BQd8d7": 113,
116
+ "BPd6e5": 114,
117
+ "BPb7c6": 115,
118
+ "WPf4f5": 116,
119
+ "WPc3d4": 117,
120
+ "WPe3e4": 118,
121
+ "BPa5a4": 119,
122
+ "WPg4g5": 120,
123
+ "WRf1d1": 121,
124
+ "BRf8d8": 122,
125
+ "WRa1e1": 123,
126
+ "WNg1e2": 124,
127
+ "WPb2c3": 125,
128
+ "BBg4f3": 126,
129
+ "WPh3h4": 127,
130
+ "WKg1h2": 128,
131
+ "BQd8b6": 129,
132
+ "BNd7e5": 130,
133
+ "BPc5c4": 131,
134
+ "WQd1c2": 132,
135
+ "WBg5f6": 133,
136
+ "WPd3d4": 134,
137
+ "BRa8e8": 135,
138
+ "BPg6g5": 136,
139
+ "BKg8h7": 137,
140
+ "WPc4c5": 138,
141
+ "BNf6d7": 139,
142
+ "BBe7f6": 140,
143
+ "WPb4b5": 141,
144
+ "WPa4a5": 142,
145
+ "BPh5h4": 143,
146
+ "WNd2f3": 144,
147
+ "BPa6a5": 145,
148
+ "BNd7f6": 146,
149
+ "BQd8d5": 147,
150
+ "WPf3f4": 148,
151
+ "BPg5g4": 149,
152
+ "WKg1f2": 150,
153
+ "BPf6f5": 151,
154
+ "WNc3e2": 152,
155
+ "WPg3g4": 153,
156
+ "BPh6h5": 154,
157
+ "BPf5f4": 155,
158
+ "BKg8f7": 156,
159
+ "WKg1f1": 157,
160
+ "WNd2e4": 158,
161
+ "BKg8f8": 159,
162
+ "BPd5c4": 160,
163
+ "WNc3b5": 161,
164
+ "BNf6h5": 162,
165
+ "WBb5c6": 163,
166
+ "WPd4c5": 164,
167
+ "WPf4e5": 165,
168
+ "WBe2f3": 166,
169
+ "BBb4c3": 167,
170
+ "WBc4b3": 168,
171
+ "WQd1d3": 169,
172
+ "WQd1b3": 170,
173
+ "BNc6e7": 171,
174
+ "BPb6b5": 172,
175
+ "WNf3d2": 173,
176
+ "BNc6b4": 174,
177
+ "WPa3a4": 175,
178
+ "WNf3h4": 176,
179
+ "WQd1d4": 177,
180
+ "BNd7c5": 178,
181
+ "BNe7f5": 179,
182
+ "WPg2f3": 180,
183
+ "WPb3b4": 181,
184
+ "BPg7f6": 182,
185
+ "BNc6a5": 183,
186
+ "BPf7e6": 184,
187
+ "BNe7g6": 185,
188
+ "BNd7b6": 186,
189
+ "WKc1b1": 187,
190
+ "BRf8f7": 188,
191
+ "WNe2g3": 189,
192
+ "WNd2c4": 190,
193
+ "BPe5f4": 191,
194
+ "WBg5h4": 192,
195
+ "BQd8d6": 193,
196
+ "BBg4h5": 194,
197
+ "WQd1h5": 195,
198
+ "BQd8a5": 196,
199
+ "WPe3d4": 197,
200
+ "WRf1f2": 198,
201
+ "WPe5f6": 199,
202
+ "BRh8g8": 200,
203
+ "WBd3e4": 201,
204
+ "BBd7c6": 202,
205
+ "WNc3a4": 203,
206
+ "BKc8b8": 204,
207
+ "BPf6e5": 205,
208
+ "BRf8c8": 206,
209
+ "WRd1e1": 207,
210
+ "WPe4f5": 208,
211
+ "WBe3d4": 209,
212
+ "BKe8d7": 210,
213
+ "BPh6g5": 211,
214
+ "WPe5e6": 212,
215
+ "WRa1f1": 213,
216
+ "BRa8f8": 214,
217
+ "WRd1d2": 215,
218
+ "BKe8f7": 216,
219
+ "BBe7d6": 217,
220
+ "WNd4c6": 218,
221
+ "BKe8d8": 219,
222
+ "BKe8e7": 220,
223
+ "BPf5e4": 221,
224
+ "WPf2e3": 222,
225
+ "WRh1g1": 223,
226
+ "WNd2b3": 224,
227
+ "WRe1e2": 225,
228
+ "WNe2f4": 226,
229
+ "BRd8d7": 227,
230
+ "WPd5d6": 228,
231
+ "WPd3e4": 229,
232
+ "WPh3g4": 230,
233
+ "BPa6b5": 231,
234
+ "BPa4a3": 232,
235
+ "WPh5h6": 233,
236
+ "WBf4g3": 234,
237
+ "BRd8e8": 235,
238
+ "WRf1c1": 236,
239
+ "WRe1d1": 237,
240
+ "WNe4f6": 238,
241
+ "BBd6e5": 239,
242
+ "BKf8e7": 240,
243
+ "WBf4e5": 241,
244
+ "WKf1e2": 242,
245
+ "BPd4d3": 243,
246
+ "BPh7g6": 244,
247
+ "BRe8e7": 245,
248
+ "WPa3b4": 246,
249
+ "WBc4d5": 247,
250
+ "WRh1e1": 248,
251
+ "WBd2c3": 249,
252
+ "WRd1d8": 250,
253
+ "BRh8f8": 251,
254
+ "BNe7d5": 252,
255
+ "WRe1e3": 253,
256
+ "WPa5a6": 254,
257
+ "BRd8d1": 255,
258
+ "BKg7f6": 256,
259
+ "BBc8a6": 257,
260
+ "WPe5d6": 258,
261
+ "WBe2d3": 259,
262
+ "BRh8e8": 260,
263
+ "WNe5c6": 261,
264
+ "WPa4b5": 262,
265
+ "WRf1f3": 263,
266
+ "WPf3e4": 264,
267
+ "BKe8f8": 265,
268
+ "BQd8h4": 266,
269
+ "BBc5b6": 267,
270
+ "WQd1g4": 268,
271
+ "BPe6f5": 269,
272
+ "BBe7g5": 270,
273
+ "BPe4e3": 271,
274
+ "WKe1d2": 272,
275
+ "BQd8g5": 273,
276
+ "WKe1e2": 274,
277
+ "WQd1a4": 275,
278
+ "WKg2f3": 276,
279
+ "WKe1d1": 277,
280
+ "BNb8a6": 278,
281
+ "BPa5b4": 279,
282
+ "BPf7g6": 280,
283
+ "BNg8h6": 281,
284
+ "WNe2d4": 282,
285
+ "WBc4d3": 283,
286
+ "WKf2e3": 284,
287
+ "WRd1d7": 285,
288
+ "WRe1e8": 286,
289
+ "BNe5f3": 287,
290
+ "BPb5c4": 288,
291
+ "BRf8f6": 289,
292
+ "BRe8d8": 290,
293
+ "BPh4h3": 291,
294
+ "BBe6d5": 292,
295
+ "BBg7e5": 293,
296
+ "WRh1f1": 294,
297
+ "WPh4g5": 295,
298
+ "BPh5g4": 296,
299
+ "BNd5c3": 297,
300
+ "BBh5g6": 298,
301
+ "WPd5c6": 299,
302
+ "WNb1a3": 300,
303
+ "WBh4g3": 301,
304
+ "BRe8e1": 302,
305
+ "BKf7e6": 303,
306
+ "WRe1e4": 304,
307
+ "WBg5e7": 305,
308
+ "BPb4b3": 306,
309
+ "BPd4c3": 307,
310
+ "BNe7c6": 308,
311
+ "BBf5g6": 309,
312
+ "BBg7f6": 310,
313
+ "WKh1g1": 311,
314
+ "BBe7c5": 312,
315
+ "WBd3e2": 313,
316
+ "BPg6f5": 314,
317
+ "WNe5d7": 315,
318
+ "BKh8g8": 316,
319
+ "WPa2b3": 317,
320
+ "WPf5f6": 318,
321
+ "WNg5e6": 319,
322
+ "BRd8d2": 320,
323
+ "WBd3c4": 321,
324
+ "WRe1e5": 322,
325
+ "WRe1e7": 323,
326
+ "BKg7h6": 324,
327
+ "WRd1d3": 325,
328
+ "WPh2g3": 326,
329
+ "BNe4c3": 327,
330
+ "WPc5c6": 328,
331
+ "WBe3c5": 329,
332
+ "WBc4b5": 330,
333
+ "WPb5b6": 331,
334
+ "BRe8e6": 332,
335
+ "BKe7d6": 333,
336
+ "WKe1f1": 334,
337
+ "WBc4e6": 335,
338
+ "WBd3f5": 336,
339
+ "BPc4c3": 337,
340
+ "WRe1f1": 338,
341
+ "WPg5g6": 339,
342
+ "WBf4d6": 340,
343
+ "BPb6c5": 341,
344
+ "WBe3f4": 342,
345
+ "BPc7d6": 343,
346
+ "WKh1h2": 344,
347
+ "WKe1f2": 345,
348
+ "WBb5a4": 346,
349
+ "WPh5g6": 347,
350
+ "WPb4c5": 348,
351
+ "WPb3c4": 349,
352
+ "BNe5c4": 350,
353
+ "BBc5d4": 351,
354
+ "WNe4d6": 352,
355
+ "BBf5e4": 353,
356
+ "BPf4f3": 354,
357
+ "BBf5d3": 355,
358
+ "WRh1d1": 356,
359
+ "WQd1e1": 357,
360
+ "WKg2g3": 358,
361
+ "BQd8e8": 359,
362
+ "WBe3g5": 360,
363
+ "BRh8d8": 361,
364
+ "WPg4f5": 362,
365
+ "WBd3c2": 363,
366
+ "WKh2g3": 364,
367
+ "WBc4f7": 365,
368
+ "WRd1c1": 366,
369
+ "BRd8d6": 367,
370
+ "WNg5f3": 368,
371
+ "WNd5f6": 369,
372
+ "WQf3g3": 370,
373
+ "WKe2d3": 371,
374
+ "WKg2h3": 372,
375
+ "BRc8d8": 373,
376
+ "BKf7g6": 374,
377
+ "BNd4f3": 375,
378
+ "WKf2g3": 376,
379
+ "BRe8e5": 377,
380
+ "BBb7d5": 378,
381
+ "BKf7e7": 379,
382
+ "WRd1d4": 380,
383
+ "BRe8f8": 381,
384
+ "WNf3h2": 382,
385
+ "BNd5f4": 383,
386
+ "BRe8e2": 384,
387
+ "BPe4f3": 385,
388
+ "BNe5d3": 386,
389
+ "BRd8c8": 387,
390
+ "WPd5e6": 388,
391
+ "WPc2d3": 389,
392
+ "WBb5d7": 390,
393
+ "BBd6e7": 391,
394
+ "WNd4f5": 392,
395
+ "BNh5f4": 393,
396
+ "WNg5f7": 394,
397
+ "BRd8d5": 395,
398
+ "WBg5e3": 396,
399
+ "BBd6f4": 397,
400
+ "WPf2g3": 398,
401
+ "BRc8c7": 399,
402
+ "WNe4c5": 400,
403
+ "BPg4g3": 401,
404
+ "WNb5d6": 402,
405
+ "WRc1d1": 403,
406
+ "BKh8h7": 404,
407
+ "BKf7f6": 405,
408
+ "WBe2g4": 406,
409
+ "WRd1d5": 407,
410
+ "BPa7b6": 408,
411
+ "WRe1e6": 409,
412
+ "WKf2e2": 410,
413
+ "WBc1h6": 411,
414
+ "WRd1d6": 412,
415
+ "BKg7g6": 413,
416
+ "BNg4e3": 414,
417
+ "WBc1a3": 415,
418
+ "WPg3f4": 416,
419
+ "WNe5f3": 417,
420
+ "WBe2c4": 418,
421
+ "WRd1f1": 419,
422
+ "BRc8c2": 420,
423
+ "BNa5c4": 421,
424
+ "BKh7g8": 422,
425
+ "BPa3a2": 423,
426
+ "WNe2c3": 424,
427
+ "WBe3d2": 425,
428
+ "WKc1d2": 426,
429
+ "BKh7g6": 427,
430
+ "WKf2f3": 428,
431
+ "BQd8c8": 429,
432
+ "BRd8d4": 430,
433
+ "BKe7d7": 431,
434
+ "BNe4d2": 432,
435
+ "BRf8g8": 433,
436
+ "BBc5e3": 434,
437
+ "BBd7e6": 435,
438
+ "BBb7e4": 436,
439
+ "WKh2g1": 437,
440
+ "WNd4f3": 438,
441
+ "WBd2e3": 439,
442
+ "WNg1h3": 440,
443
+ "WBf4g5": 441,
444
+ "BRd8f8": 442,
445
+ "WRc1c2": 443,
446
+ "BNb6c4": 444,
447
+ "WNe4g5": 445,
448
+ "WBd3g6": 446,
449
+ "BBd7b5": 447,
450
+ "BRe8e4": 448,
451
+ "WPa6a7": 449,
452
+ "WNg3f5": 450,
453
+ "BKc8d7": 451,
454
+ "BRf8b8": 452,
455
+ "BNe4f6": 453,
456
+ "WNd2f1": 454,
457
+ "BKe7f6": 455,
458
+ "WRf1g1": 456,
459
+ "WBg5f4": 457,
460
+ "BBe6c4": 458,
461
+ "WKh1g2": 459,
462
+ "BNf6e8": 460,
463
+ "WBf4e3": 461,
464
+ "WQe2f3": 462,
465
+ "BRd8d3": 463,
466
+ "WRc1c7": 464,
467
+ "WKe2d2": 465,
468
+ "BBd6c5": 466,
469
+ "WBd3b5": 467,
470
+ "BNg6f4": 468,
471
+ "BKh8g7": 469,
472
+ "BPg5f4": 470,
473
+ "BKf8g7": 471,
474
+ "BBg7h6": 472,
475
+ "BKf8e8": 473,
476
+ "WPc4b5": 474,
477
+ "BQf6g6": 475,
478
+ "WKe2f3": 476,
479
+ "BKf8g8": 477,
480
+ "BBb7c6": 478,
481
+ "BBg4e2": 479,
482
+ "WNg5e4": 480,
483
+ "BKg7f7": 481,
484
+ "BKd7c6": 482,
485
+ "BNf6h7": 483,
486
+ "BNb6d5": 484,
487
+ "WRf1b1": 485,
488
+ "BBg7d4": 486,
489
+ "WQd1d8": 487,
490
+ "BNd7f8": 488,
491
+ "WPc3b4": 489,
492
+ "BNg4f6": 490,
493
+ "BKf7g8": 491,
494
+ "WNd5e7": 492,
495
+ "BNc5e4": 493,
496
+ "WNh4f5": 494,
497
+ "BNd5e3": 495,
498
+ "BQe7f6": 496,
499
+ "BNb4d3": 497,
500
+ "WNd4e6": 498,
501
+ "BBb4a5": 499,
502
+ "WBe3h6": 500,
503
+ "WKf1g1": 501,
504
+ "WQd1d5": 502,
505
+ "WPf4g5": 503,
506
+ "WKe2e3": 504,
507
+ "BRf8f5": 505,
508
+ "BBb4d2": 506,
509
+ "BQd8d4": 507,
510
+ "BKe7e6": 508,
511
+ "BPd6c5": 509,
512
+ "BNd5f6": 510,
513
+ "BQd8d1": 511
514
+ }