filomeneroquefort committed on
Commit
a109189
·
verified ·
1 Parent(s): 0c28df7

Chess Challenge submission by filomeneroquefort

Browse files
Files changed (5) hide show
  1. README.md +4 -4
  2. config.json +6 -10
  3. model.safetensors +2 -2
  4. tokenizer.py +118 -199
  5. vocab.json +86 -1200
README.md CHANGED
@@ -10,11 +10,11 @@ license: mit
10
  Chess model submitted to the LLM Course Chess Challenge.
11
  ## Submission Info
12
  - **Submitted by**: [filomeneroquefort](https://huggingface.co/filomeneroquefort)
13
- - **Parameters**: 1,677,436
14
  - **Organization**: LLM-course
15
  ## Model Details
16
  - **Architecture**: Chess Transformer (GPT-style)
17
- - **Vocab size**: 1204
18
- - **Embedding dim**: 160
19
  - **Layers**: 6
20
- - **Heads**: 4
 
10
  Chess model submitted to the LLM Course Chess Challenge.
11
  ## Submission Info
12
  - **Submitted by**: [filomeneroquefort](https://huggingface.co/filomeneroquefort)
13
+ - **Parameters**: 885,696
14
  - **Organization**: LLM-course
15
  ## Model Details
16
  - **Architecture**: Chess Transformer (GPT-style)
17
+ - **Vocab size**: 90
18
+ - **Embedding dim**: 112
19
  - **Layers**: 6
20
+ - **Heads**: 8
config.json CHANGED
@@ -3,22 +3,18 @@
3
  "ChessForCausalLM"
4
  ],
5
  "bos_token_id": 1,
6
- "dropout": 0.0,
7
  "dtype": "float32",
8
  "eos_token_id": 2,
9
  "layer_norm_epsilon": 1e-05,
10
  "model_type": "chess_transformer",
11
- "n_ctx": 256,
12
- "n_embd": 160,
13
- "n_head": 4,
14
- "n_inner": 426,
15
- "n_kv_head": 2,
16
  "n_layer": 6,
17
  "pad_token_id": 0,
18
- "rope_theta": 10000.0,
19
  "tie_weights": true,
20
  "transformers_version": "4.57.6",
21
- "use_gqa": true,
22
- "use_rope": true,
23
- "vocab_size": 1204
24
  }
 
3
  "ChessForCausalLM"
4
  ],
5
  "bos_token_id": 1,
6
+ "dropout": 0.1,
7
  "dtype": "float32",
8
  "eos_token_id": 2,
9
  "layer_norm_epsilon": 1e-05,
10
  "model_type": "chess_transformer",
11
+ "n_ctx": 1024,
12
+ "n_embd": 112,
13
+ "n_head": 8,
14
+ "n_inner": 336,
 
15
  "n_layer": 6,
16
  "pad_token_id": 0,
 
17
  "tie_weights": true,
18
  "transformers_version": "4.57.6",
19
+ "vocab_size": 90
 
 
20
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:bfa57b78ceae0ce8f7653d2fca3abda1ec8ead5b2b85c9fec4ace6f6e2e0bd98
3
- size 6716208
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:df7a86dd032d238f1187972c5a09513a33352263500b3e0537467c5263dc502b
3
+ size 3549224
tokenizer.py CHANGED
@@ -1,42 +1,29 @@
1
  """
2
- Custom Chess Tokenizer for the Chess Challenge.
3
 
4
- This tokenizer treats each move as a single token using the extended UCI notation
5
- from the Lichess dataset (e.g., WPe2e4, BNg8f6).
6
 
7
- The dataset format uses:
8
- - W/B prefix for White/Black
9
- - Piece letter: P=Pawn, N=Knight, B=Bishop, R=Rook, Q=Queen, K=King
10
- - Source and destination squares (e.g., e2e4)
11
- - Special suffixes: (x)=capture, (+)=check, (+*)=checkmate, (o)/(O)=castling
 
12
  """
13
 
14
  from __future__ import annotations
15
 
16
  import json
17
  import os
18
- from pathlib import Path
19
  from typing import Dict, List, Optional
20
 
21
  from transformers import PreTrainedTokenizer
22
 
23
-
24
  class ChessTokenizer(PreTrainedTokenizer):
25
- """
26
- A custom tokenizer for chess moves using extended UCI notation.
27
-
28
- This tokenizer maps each possible chess move to a unique token ID.
29
- The vocabulary is built from the training dataset to ensure all moves
30
- encountered during training have a corresponding token.
31
-
32
- Example:
33
- >>> tokenizer = ChessTokenizer()
34
- >>> tokenizer.encode("WPe2e4 BPe7e5")
35
- [1, 42, 87, 2] # [BOS, e2e4, e7e5, EOS]
36
- """
37
 
38
  model_input_names = ["input_ids", "attention_mask"]
39
- vocab_files_names = {"vocab_file": "vocab.json"}
40
 
41
  # Special tokens
42
  PAD_TOKEN = "[PAD]"
@@ -44,48 +31,37 @@ class ChessTokenizer(PreTrainedTokenizer):
44
  EOS_TOKEN = "[EOS]"
45
  UNK_TOKEN = "[UNK]"
46
 
 
 
 
 
47
  def __init__(
48
  self,
49
  vocab_file: Optional[str] = None,
50
  vocab: Optional[Dict[str, int]] = None,
51
  **kwargs,
52
  ):
53
- """
54
- Initialize the chess tokenizer.
55
-
56
- Args:
57
- vocab_file: Path to a JSON file containing the vocabulary mapping.
58
- vocab: Dictionary mapping tokens to IDs (alternative to vocab_file).
59
- **kwargs: Additional arguments passed to PreTrainedTokenizer.
60
- """
61
- # Initialize special tokens
62
  self._pad_token = self.PAD_TOKEN
63
  self._bos_token = self.BOS_TOKEN
64
  self._eos_token = self.EOS_TOKEN
65
  self._unk_token = self.UNK_TOKEN
66
-
67
- # Remove any duplicate special-token entries passed through kwargs
68
- # to avoid "multiple values for keyword" errors when loading from disk.
69
  kwargs.pop("pad_token", None)
70
  kwargs.pop("bos_token", None)
71
  kwargs.pop("eos_token", None)
72
  kwargs.pop("unk_token", None)
73
 
74
- # Load or create vocabulary
75
  if vocab is not None:
76
  self._vocab = vocab
77
  elif vocab_file is not None and os.path.exists(vocab_file):
78
  with open(vocab_file, "r", encoding="utf-8") as f:
79
  self._vocab = json.load(f)
80
  else:
81
- # Create a minimal vocabulary with just special tokens
82
- # The full vocabulary should be built from the dataset
83
- self._vocab = self._create_default_vocab()
84
-
85
- # Create reverse mapping
86
  self._ids_to_tokens = {v: k for k, v in self._vocab.items()}
87
 
88
- # Call parent init AFTER setting up vocab
89
  super().__init__(
90
  pad_token=self._pad_token,
91
  bos_token=self._bos_token,
@@ -93,186 +69,129 @@ class ChessTokenizer(PreTrainedTokenizer):
93
  unk_token=self._unk_token,
94
  **kwargs,
95
  )
96
-
97
- def _create_default_vocab(self) -> Dict[str, int]:
98
  """
99
- Create a minimal default vocabulary with just special tokens.
100
-
101
- For the full vocabulary, use `build_vocab_from_dataset()`.
102
- This minimal vocab is just a placeholder - you should build from data.
103
  """
104
- special_tokens = [self.PAD_TOKEN, self.BOS_TOKEN, self.EOS_TOKEN, self.UNK_TOKEN]
105
- vocab = {token: idx for idx, token in enumerate(special_tokens)}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
106
  return vocab
107
-
108
- @classmethod
109
- def build_vocab_from_iterator(
110
- cls,
111
- iterator,
112
- min_frequency: int = 1,
113
- ) -> "ChessTokenizer":
114
- """
115
- Build a tokenizer vocabulary from an iterator of game strings.
116
-
117
- Args:
118
- iterator: An iterator yielding game strings (space-separated moves).
119
- min_frequency: Minimum frequency for a token to be included.
120
-
121
- Returns:
122
- A ChessTokenizer with the built vocabulary.
123
- """
124
- from collections import Counter
125
-
126
- token_counts = Counter()
127
-
128
- for game in iterator:
129
- moves = game.strip().split()
130
- token_counts.update(moves)
131
-
132
- # Filter by frequency
133
- tokens = [
134
- token for token, count in token_counts.items()
135
- if count >= min_frequency
136
- ]
137
-
138
- # Sort for reproducibility
139
- tokens = sorted(tokens)
140
-
141
- # Build vocabulary
142
- special_tokens = [cls.PAD_TOKEN, cls.BOS_TOKEN, cls.EOS_TOKEN, cls.UNK_TOKEN]
143
- vocab = {token: idx for idx, token in enumerate(special_tokens + tokens)}
144
-
145
- return cls(vocab=vocab)
146
-
147
- @classmethod
148
- def build_vocab_from_dataset(
149
- cls,
150
- dataset_name: str = "dlouapre/lichess_2025-01_1M",
151
- split: str = "train",
152
- column: str = "text",
153
- min_frequency: int = 500,
154
- max_samples: Optional[int] = 100000,
155
- ) -> "ChessTokenizer":
156
- """
157
- Build a tokenizer vocabulary from a Hugging Face dataset.
158
-
159
- Args:
160
- dataset_name: Name of the dataset on Hugging Face Hub.
161
- split: Dataset split to use.
162
- column: Column containing the game strings.
163
- min_frequency: Minimum frequency for a token to be included (default: 500).
164
- max_samples: Maximum number of samples to process (default: 100k).
165
-
166
- Returns:
167
- A ChessTokenizer with the built vocabulary.
168
- """
169
- from datasets import load_dataset
170
-
171
- dataset = load_dataset(dataset_name, split=split)
172
-
173
- if max_samples is not None:
174
- dataset = dataset.select(range(min(max_samples, len(dataset))))
175
-
176
- def game_iterator():
177
- for example in dataset:
178
- yield example[column]
179
-
180
- return cls.build_vocab_from_iterator(game_iterator(), min_frequency=min_frequency)
181
-
182
  @property
183
  def vocab_size(self) -> int:
184
- """Return the size of the vocabulary."""
185
  return len(self._vocab)
186
-
187
  def get_vocab(self) -> Dict[str, int]:
188
- """Return the vocabulary as a dictionary."""
189
  return dict(self._vocab)
190
-
191
  def _tokenize(self, text: str) -> List[str]:
192
  """
193
- Tokenize a string of moves into a list of tokens.
194
-
195
- Args:
196
- text: A string of space-separated moves.
197
-
198
- Returns:
199
- List of move tokens.
200
  """
201
- return text.strip().split()
202
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
203
  def _convert_token_to_id(self, token: str) -> int:
204
- """Convert a token to its ID."""
205
- return self._vocab.get(token, self._vocab.get(self.UNK_TOKEN, 0))
206
-
207
  def _convert_id_to_token(self, index: int) -> str:
208
- """Convert an ID to its token."""
209
  return self._ids_to_tokens.get(index, self.UNK_TOKEN)
210
-
211
  def convert_tokens_to_string(self, tokens: List[str]) -> str:
212
- """Convert a list of tokens back to a string."""
213
- # Filter out special tokens for cleaner output
214
- special = {self.PAD_TOKEN, self.BOS_TOKEN, self.EOS_TOKEN, self.UNK_TOKEN}
215
- return " ".join(t for t in tokens if t not in special)
216
-
217
- def save_vocabulary(
218
- self,
219
- save_directory: str,
220
- filename_prefix: Optional[str] = None,
221
- ) -> tuple:
222
  """
223
- Save the vocabulary to a JSON file.
 
 
 
 
 
224
 
225
- Args:
226
- save_directory: Directory to save the vocabulary.
227
- filename_prefix: Optional prefix for the filename.
228
 
229
- Returns:
230
- Tuple containing the path to the saved vocabulary file.
231
- """
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
232
  if not os.path.isdir(save_directory):
233
  os.makedirs(save_directory, exist_ok=True)
234
-
235
  vocab_file = os.path.join(
236
- save_directory,
237
- (filename_prefix + "-" if filename_prefix else "") + "vocab.json",
238
  )
239
-
240
  with open(vocab_file, "w", encoding="utf-8") as f:
241
  json.dump(self._vocab, f, ensure_ascii=False, indent=2)
242
-
243
  return (vocab_file,)
244
 
245
-
246
- def count_vocab_from_dataset(
247
- dataset_name: str = "dlouapre/lichess_2025-01_1M",
248
- split: str = "train",
249
- column: str = "text",
250
- max_samples: Optional[int] = 10000,
251
- ) -> Dict[str, int]:
252
- """
253
- Count token frequencies in a dataset (useful for vocabulary analysis).
254
-
255
- Args:
256
- dataset_name: Name of the dataset on Hugging Face Hub.
257
- split: Dataset split to use.
258
- column: Column containing the game strings.
259
- max_samples: Maximum number of samples to process.
260
-
261
- Returns:
262
- Dictionary mapping tokens to their frequencies.
263
- """
264
- from collections import Counter
265
- from datasets import load_dataset
266
-
267
- dataset = load_dataset(dataset_name, split=split)
268
-
269
- if max_samples is not None:
270
- dataset = dataset.select(range(min(max_samples, len(dataset))))
271
-
272
- token_counts = Counter()
273
-
274
- for example in dataset:
275
- moves = example[column].strip().split()
276
- token_counts.update(moves)
277
-
278
- return dict(token_counts)
 
1
  """
2
+ Atomic Chess Tokenizer.
3
 
4
+ Decomposes chess moves into atomic components:
5
+ [Piece] + [Source] + [Destination] + [Suffix]
6
 
7
+ Example: "WPe2e4(x)" -> ["WP", "e2", "e4", "(x)"]
8
+
9
+ Benefits:
10
+ - Drastically reduces vocab size (~1200 -> ~90)
11
+ - Saves ~140k parameters in the embedding layer
12
+ - Allows the model to learn spatial relationships (e2 is close to e3)
13
  """
14
 
15
  from __future__ import annotations
16
 
17
  import json
18
  import os
19
+ import re
20
  from typing import Dict, List, Optional
21
 
22
  from transformers import PreTrainedTokenizer
23
 
 
24
  class ChessTokenizer(PreTrainedTokenizer):
 
 
 
 
 
 
 
 
 
 
 
 
25
 
26
  model_input_names = ["input_ids", "attention_mask"]
 
27
 
28
  # Special tokens
29
  PAD_TOKEN = "[PAD]"
 
31
  EOS_TOKEN = "[EOS]"
32
  UNK_TOKEN = "[UNK]"
33
 
34
+ # Regex to parse the extended UCI format
35
+ # Groups: 1=Piece, 2=Source, 3=Dest, 4=Suffix
36
+ MOVE_REGEX = re.compile(r"([WB][PNBRQK])([a-h][1-8])([a-h][1-8])(.*)")
37
+
38
  def __init__(
39
  self,
40
  vocab_file: Optional[str] = None,
41
  vocab: Optional[Dict[str, int]] = None,
42
  **kwargs,
43
  ):
 
 
 
 
 
 
 
 
 
44
  self._pad_token = self.PAD_TOKEN
45
  self._bos_token = self.BOS_TOKEN
46
  self._eos_token = self.EOS_TOKEN
47
  self._unk_token = self.UNK_TOKEN
48
+
49
+ # Clean kwargs
 
50
  kwargs.pop("pad_token", None)
51
  kwargs.pop("bos_token", None)
52
  kwargs.pop("eos_token", None)
53
  kwargs.pop("unk_token", None)
54
 
 
55
  if vocab is not None:
56
  self._vocab = vocab
57
  elif vocab_file is not None and os.path.exists(vocab_file):
58
  with open(vocab_file, "r", encoding="utf-8") as f:
59
  self._vocab = json.load(f)
60
  else:
61
+ self._vocab = self._create_atomic_vocab()
62
+
 
 
 
63
  self._ids_to_tokens = {v: k for k, v in self._vocab.items()}
64
 
 
65
  super().__init__(
66
  pad_token=self._pad_token,
67
  bos_token=self._bos_token,
 
69
  unk_token=self._unk_token,
70
  **kwargs,
71
  )
72
+
73
+ def _create_atomic_vocab(self) -> Dict[str, int]:
74
  """
75
+ Manually builds the vocabulary because we know the rules of Chess.
76
+ We don't need to learn this from the dataset.
 
 
77
  """
78
+ vocab = {}
79
+ idx = 0
80
+
81
+ # 1. Special Tokens
82
+ for token in [self.PAD_TOKEN, self.BOS_TOKEN, self.EOS_TOKEN, self.UNK_TOKEN]:
83
+ vocab[token] = idx
84
+ idx += 1
85
+
86
+ # 2. Pieces (Color + Type)
87
+ colors = ['W', 'B']
88
+ pieces = ['P', 'N', 'B', 'R', 'Q', 'K']
89
+ for c in colors:
90
+ for p in pieces:
91
+ vocab[f"{c}{p}"] = idx
92
+ idx += 1
93
+
94
+ # 3. Squares (a1 to h8)
95
+ files = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']
96
+ ranks = ['1', '2', '3', '4', '5', '6', '7', '8']
97
+ for f in files:
98
+ for r in ranks:
99
+ vocab[f"{f}{r}"] = idx
100
+ idx += 1
101
+
102
+ # 4. Common Suffixes (derived from Lichess notation)
103
+ # (x)=capture, (+)=check, (#)=mate, (o)=castling
104
+ suffixes = ["(x)", "(+)", "(+*)", "(o)", "(O)", "=", "=Q", "=R", "=B", "=N"]
105
+ for s in suffixes:
106
+ vocab[s] = idx
107
+ idx += 1
108
+
109
  return vocab
110
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
111
  @property
112
  def vocab_size(self) -> int:
 
113
  return len(self._vocab)
114
+
115
  def get_vocab(self) -> Dict[str, int]:
 
116
  return dict(self._vocab)
117
+
118
  def _tokenize(self, text: str) -> List[str]:
119
  """
120
+ Splits a string of moves into atomic tokens.
121
+ "WPe2e4" -> ["WP", "e2", "e4"]
 
 
 
 
 
122
  """
123
+ raw_moves = text.strip().split()
124
+ tokens = []
125
+
126
+ for move in raw_moves:
127
+ match = self.MOVE_REGEX.match(move)
128
+ if match:
129
+ # Add piece, source, dest
130
+ tokens.extend([match.group(1), match.group(2), match.group(3)])
131
+ # Add suffix if it exists
132
+ suffix = match.group(4)
133
+ if suffix:
134
+ tokens.append(suffix)
135
+ else:
136
+ # Fallback for weird formatting (or UNK)
137
+ tokens.append(move)
138
+
139
+ return tokens
140
+
141
  def _convert_token_to_id(self, token: str) -> int:
142
+ return self._vocab.get(token, self._vocab.get(self.UNK_TOKEN))
143
+
 
144
  def _convert_id_to_token(self, index: int) -> str:
 
145
  return self._ids_to_tokens.get(index, self.UNK_TOKEN)
146
+
147
  def convert_tokens_to_string(self, tokens: List[str]) -> str:
 
 
 
 
 
 
 
 
 
 
148
  """
149
+ Reconstructs moves from atomic tokens.
150
+ This is tricky because we need to join them without spaces,
151
+ but add spaces between actual moves.
152
+ """
153
+ out = []
154
+ current_move = []
155
 
156
+ special = {self.PAD_TOKEN, self.BOS_TOKEN, self.EOS_TOKEN, self.UNK_TOKEN}
 
 
157
 
158
+ for t in tokens:
159
+ if t in special:
160
+ continue
161
+
162
+ current_move.append(t)
163
+
164
+ # Logic to decide when a move ends
165
+ # A move usually ends after a Suffix OR after a Destination square if no suffix follows
166
+ # This heuristic is simple: if we have a piece, src, and dest, check next token
167
+
168
+ # Simplified reconstruction:
169
+ # Just join everything and use a heuristic to insert spaces?
170
+ # Better: The model generates atomic tokens.
171
+ # We know a move starts with [WB][PNBRQK].
172
+
173
+ # Robust reconstruction approach:
174
+ full_str = "".join([t for t in tokens if t not in special])
175
+
176
+ # Insert space before every Piece token (except the first one)
177
+ # Regex lookbehind isn't strictly necessary, we can just replace
178
+ formatted = re.sub(r'(?<!^)([WB][PNBRQK])', r' \1', full_str)
179
+
180
+ return formatted
181
+
182
+ def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> tuple:
183
  if not os.path.isdir(save_directory):
184
  os.makedirs(save_directory, exist_ok=True)
 
185
  vocab_file = os.path.join(
186
+ save_directory, (filename_prefix + "-" if filename_prefix else "") + "vocab.json"
 
187
  )
 
188
  with open(vocab_file, "w", encoding="utf-8") as f:
189
  json.dump(self._vocab, f, ensure_ascii=False, indent=2)
 
190
  return (vocab_file,)
191
 
192
+ # We don't really need build_vocab_from_dataset anymore as we hardcoded the rules,
193
+ # but we keep the method signature to satisfy the template.
194
+ @classmethod
195
+ def build_vocab_from_dataset(cls, *args, **kwargs):
196
+ print("Note: Atomic tokenizer uses a static vocabulary rule set.")
197
+ return cls()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
vocab.json CHANGED
@@ -3,1204 +3,90 @@
3
  "[BOS]": 1,
4
  "[EOS]": 2,
5
  "[UNK]": 3,
6
- "BBa5b6": 4,
7
- "BBa6b7": 5,
8
- "BBb4a5": 6,
9
- "BBb4c3(x)": 7,
10
- "BBb4c3(x+)": 8,
11
- "BBb4c5": 9,
12
- "BBb4d2(x+)": 10,
13
- "BBb4d6": 11,
14
- "BBb4e7": 12,
15
- "BBb7a6": 13,
16
- "BBb7c6": 14,
17
- "BBb7c6(x)": 15,
18
- "BBb7c8": 16,
19
- "BBb7d5": 17,
20
- "BBb7d5(x)": 18,
21
- "BBb7e4(x)": 19,
22
- "BBb7f3(x)": 20,
23
- "BBb7g2(x)": 21,
24
- "BBc5a7": 22,
25
- "BBc5b4": 23,
26
- "BBc5b6": 24,
27
- "BBc5d4": 25,
28
- "BBc5d4(x)": 26,
29
- "BBc5d6": 27,
30
- "BBc5e3(x)": 28,
31
- "BBc5e7": 29,
32
- "BBc5f2(x+)": 30,
33
- "BBc8a6": 31,
34
- "BBc8b7": 32,
35
- "BBc8d7": 33,
36
- "BBc8d7(x)": 34,
37
- "BBc8e6": 35,
38
- "BBc8e6(x)": 36,
39
- "BBc8f5": 37,
40
- "BBc8f5(x)": 38,
41
- "BBc8g4": 39,
42
- "BBc8g4(x)": 40,
43
- "BBc8h3": 41,
44
- "BBc8h3(x)": 42,
45
- "BBd6b4": 43,
46
- "BBd6c5": 44,
47
- "BBd6c7": 45,
48
- "BBd6e5": 46,
49
- "BBd6e5(x)": 47,
50
- "BBd6e7": 48,
51
- "BBd6f4(x)": 49,
52
- "BBd6g3(x)": 50,
53
- "BBd7b5": 51,
54
- "BBd7b5(x)": 52,
55
- "BBd7c6": 53,
56
- "BBd7c6(x)": 54,
57
- "BBd7e6": 55,
58
- "BBd7e8": 56,
59
- "BBd7f5": 57,
60
- "BBd7g4": 58,
61
- "BBe6c4(x)": 59,
62
- "BBe6d5": 60,
63
- "BBe6d5(x)": 61,
64
- "BBe6d7": 62,
65
- "BBe6f5": 63,
66
- "BBe6f7": 64,
67
- "BBe6g4": 65,
68
- "BBe7b4": 66,
69
- "BBe7c5": 67,
70
- "BBe7c5(x)": 68,
71
- "BBe7d6": 69,
72
- "BBe7d6(x)": 70,
73
- "BBe7d8": 71,
74
- "BBe7f6": 72,
75
- "BBe7f6(x)": 73,
76
- "BBe7f8": 74,
77
- "BBe7g5": 75,
78
- "BBe7g5(x)": 76,
79
- "BBe7h4(x)": 77,
80
- "BBf5c2(x)": 78,
81
- "BBf5d3(x)": 79,
82
- "BBf5e4": 80,
83
- "BBf5e4(x)": 81,
84
- "BBf5e6": 82,
85
- "BBf5g4": 83,
86
- "BBf5g6": 84,
87
- "BBf6d4(x)": 85,
88
- "BBf6e5(x)": 86,
89
- "BBf6e7": 87,
90
- "BBf6g5": 88,
91
- "BBf6g7": 89,
92
- "BBf8b4": 90,
93
- "BBf8b4(+)": 91,
94
- "BBf8c5": 92,
95
- "BBf8c5(x)": 93,
96
- "BBf8d6": 94,
97
- "BBf8d6(x)": 95,
98
- "BBf8e7": 96,
99
- "BBf8g7": 97,
100
- "BBf8h6": 98,
101
- "BBg4d7": 99,
102
- "BBg4e2(x)": 100,
103
- "BBg4e6": 101,
104
- "BBg4f3(x)": 102,
105
- "BBg4f5": 103,
106
- "BBg4h5": 104,
107
- "BBg7b2(x)": 105,
108
- "BBg7c3(x)": 106,
109
- "BBg7d4(x)": 107,
110
- "BBg7e5(x)": 108,
111
- "BBg7f6": 109,
112
- "BBg7f6(x)": 110,
113
- "BBg7f8": 111,
114
- "BBg7h6": 112,
115
- "BBh5g6": 113,
116
- "BKb8a7": 114,
117
- "BKb8a8": 115,
118
- "BKb8c7": 116,
119
- "BKb8c8": 117,
120
- "BKc5b4": 118,
121
- "BKc6b5": 119,
122
- "BKc6b6": 120,
123
- "BKc6d5": 121,
124
- "BKc6d6": 122,
125
- "BKc7b6": 123,
126
- "BKc7b7": 124,
127
- "BKc7c6": 125,
128
- "BKc7d6": 126,
129
- "BKc7d7": 127,
130
- "BKc8b7": 128,
131
- "BKc8b8": 129,
132
- "BKc8c7": 130,
133
- "BKc8d7": 131,
134
- "BKc8d8": 132,
135
- "BKd5c4": 133,
136
- "BKd6c5": 134,
137
- "BKd6c6": 135,
138
- "BKd6c7": 136,
139
- "BKd6d5": 137,
140
- "BKd6e5": 138,
141
- "BKd6e6": 139,
142
- "BKd6e7": 140,
143
- "BKd7c6": 141,
144
- "BKd7c7": 142,
145
- "BKd7c8": 143,
146
- "BKd7d6": 144,
147
- "BKd7d8": 145,
148
- "BKd7e6": 146,
149
- "BKd7e7": 147,
150
- "BKd7e8": 148,
151
- "BKd8c7": 149,
152
- "BKd8c8": 150,
153
- "BKd8d7": 151,
154
- "BKd8e7": 152,
155
- "BKd8e8": 153,
156
- "BKe5d4": 154,
157
- "BKe6d5": 155,
158
- "BKe6d6": 156,
159
- "BKe6d7": 157,
160
- "BKe6e5": 158,
161
- "BKe6e7": 159,
162
- "BKe6f5": 160,
163
- "BKe6f6": 161,
164
- "BKe6f7": 162,
165
- "BKe7d6": 163,
166
- "BKe7d7": 164,
167
- "BKe7d8": 165,
168
- "BKe7e6": 166,
169
- "BKe7e8": 167,
170
- "BKe7f6": 168,
171
- "BKe7f7": 169,
172
- "BKe7f8": 170,
173
- "BKe8c8(O)": 171,
174
- "BKe8d7": 172,
175
- "BKe8d8": 173,
176
- "BKe8d8(x)": 174,
177
- "BKe8e7": 175,
178
- "BKe8f7": 176,
179
- "BKe8f7(x)": 177,
180
- "BKe8f8": 178,
181
- "BKe8g8(o)": 179,
182
- "BKf5e4": 180,
183
- "BKf5e6": 181,
184
- "BKf5f4": 182,
185
- "BKf5g4": 183,
186
- "BKf5g6": 184,
187
- "BKf6e5": 185,
188
- "BKf6e6": 186,
189
- "BKf6e7": 187,
190
- "BKf6f5": 188,
191
- "BKf6f7": 189,
192
- "BKf6g5": 190,
193
- "BKf6g6": 191,
194
- "BKf6g7": 192,
195
- "BKf7e6": 193,
196
- "BKf7e7": 194,
197
- "BKf7e8": 195,
198
- "BKf7f6": 196,
199
- "BKf7f8": 197,
200
- "BKf7g6": 198,
201
- "BKf7g7": 199,
202
- "BKf7g8": 200,
203
- "BKf8e7": 201,
204
- "BKf8e8": 202,
205
- "BKf8f7": 203,
206
- "BKf8g7": 204,
207
- "BKf8g8": 205,
208
- "BKg5f4": 206,
209
- "BKg5f6": 207,
210
- "BKg5g4": 208,
211
- "BKg6f5": 209,
212
- "BKg6f6": 210,
213
- "BKg6f7": 211,
214
- "BKg6g5": 212,
215
- "BKg6g7": 213,
216
- "BKg6h5": 214,
217
- "BKg7f6": 215,
218
- "BKg7f7": 216,
219
- "BKg7f8": 217,
220
- "BKg7g6": 218,
221
- "BKg7g8": 219,
222
- "BKg7h6": 220,
223
- "BKg7h7": 221,
224
- "BKg7h8": 222,
225
- "BKg8f7": 223,
226
- "BKg8f7(x)": 224,
227
- "BKg8f8": 225,
228
- "BKg8f8(x)": 226,
229
- "BKg8g7": 227,
230
- "BKg8g7(x)": 228,
231
- "BKg8h7": 229,
232
- "BKg8h7(x)": 230,
233
- "BKg8h8": 231,
234
- "BKh6g5": 232,
235
- "BKh6g7": 233,
236
- "BKh6h5": 234,
237
- "BKh7g6": 235,
238
- "BKh7g7": 236,
239
- "BKh7g8": 237,
240
- "BKh7h6": 238,
241
- "BKh7h8": 239,
242
- "BKh8g7": 240,
243
- "BKh8g8": 241,
244
- "BKh8h7": 242,
245
- "BNa5c4": 243,
246
- "BNa5c4(x)": 244,
247
- "BNa5c6": 245,
248
- "BNa6b4": 246,
249
- "BNa6c5": 247,
250
- "BNa6c7": 248,
251
- "BNb4c6": 249,
252
- "BNb4d3": 250,
253
- "BNb4d5": 251,
254
- "BNb6c4": 252,
255
- "BNb6d5": 253,
256
- "BNb6d7": 254,
257
- "BNb8a6": 255,
258
- "BNb8c6": 256,
259
- "BNb8c6(x)": 257,
260
- "BNb8d7": 258,
261
- "BNb8d7(x)": 259,
262
- "BNc2a1(x)": 260,
263
- "BNc5d7": 261,
264
- "BNc5e4": 262,
265
- "BNc5e4(x)": 263,
266
- "BNc5e6": 264,
267
- "BNc6a5": 265,
268
- "BNc6b4": 266,
269
- "BNc6b4(x)": 267,
270
- "BNc6b8": 268,
271
- "BNc6d4": 269,
272
- "BNc6d4(x)": 270,
273
- "BNc6d8": 271,
274
- "BNc6e5": 272,
275
- "BNc6e5(x)": 273,
276
- "BNc6e7": 274,
277
- "BNd4c6": 275,
278
- "BNd4e2(+)": 276,
279
- "BNd4e6": 277,
280
- "BNd4f3(x+)": 278,
281
- "BNd4f5": 279,
282
- "BNd5b4": 280,
283
- "BNd5b6": 281,
284
- "BNd5c3": 282,
285
- "BNd5c3(x)": 283,
286
- "BNd5e3": 284,
287
- "BNd5e3(x)": 285,
288
- "BNd5f4": 286,
289
- "BNd5f4(x)": 287,
290
- "BNd5f6": 288,
291
- "BNd7b6": 289,
292
- "BNd7c5": 290,
293
- "BNd7c5(x)": 291,
294
- "BNd7e5": 292,
295
- "BNd7e5(x)": 293,
296
- "BNd7f6": 294,
297
- "BNd7f6(x)": 295,
298
- "BNd7f8": 296,
299
- "BNe4c3(x)": 297,
300
- "BNe4c5": 298,
301
- "BNe4d2(x)": 299,
302
- "BNe4d6": 300,
303
- "BNe4f2(x)": 301,
304
- "BNe4f6": 302,
305
- "BNe4g3(x)": 303,
306
- "BNe4g5": 304,
307
- "BNe5c4": 305,
308
- "BNe5c4(x)": 306,
309
- "BNe5c6": 307,
310
- "BNe5d3": 308,
311
- "BNe5d3(x)": 309,
312
- "BNe5d7": 310,
313
- "BNe5f3(+)": 311,
314
- "BNe5f3(x+)": 312,
315
- "BNe5g4": 313,
316
- "BNe5g6": 314,
317
- "BNe7c6": 315,
318
- "BNe7d5": 316,
319
- "BNe7d5(x)": 317,
320
- "BNe7f5": 318,
321
- "BNe7f5(x)": 319,
322
- "BNe7g6": 320,
323
- "BNf5d4": 321,
324
- "BNf5d4(x)": 322,
325
- "BNf5e3(x)": 323,
326
- "BNf5h4": 324,
327
- "BNf6d5": 325,
328
- "BNf6d5(x)": 326,
329
- "BNf6d7": 327,
330
- "BNf6d7(x)": 328,
331
- "BNf6e4": 329,
332
- "BNf6e4(x)": 330,
333
- "BNf6e8": 331,
334
- "BNf6g4": 332,
335
- "BNf6g4(x)": 333,
336
- "BNf6g8": 334,
337
- "BNf6h5": 335,
338
- "BNf6h5(x)": 336,
339
- "BNf6h7": 337,
340
- "BNf8g6": 338,
341
- "BNg4e3": 339,
342
- "BNg4e3(x)": 340,
343
- "BNg4e5": 341,
344
- "BNg4e5(x)": 342,
345
- "BNg4f2(x)": 343,
346
- "BNg4f6": 344,
347
- "BNg4h6": 345,
348
- "BNg6e5": 346,
349
- "BNg6e5(x)": 347,
350
- "BNg6e7": 348,
351
- "BNg6f4": 349,
352
- "BNg8e7": 350,
353
- "BNg8f6": 351,
354
- "BNg8f6(x)": 352,
355
- "BNg8h6": 353,
356
- "BNh5f4": 354,
357
- "BNh5f6": 355,
358
- "BNh5g3(x)": 356,
359
- "BNh6f5": 357,
360
- "BPa2a1(Q)": 358,
361
- "BPa3a2": 359,
362
- "BPa4a3": 360,
363
- "BPa4b3(x)": 361,
364
- "BPa5a4": 362,
365
- "BPa5b4(x)": 363,
366
- "BPa6a5": 364,
367
- "BPa6b5(x)": 365,
368
- "BPa7a5": 366,
369
- "BPa7a6": 367,
370
- "BPa7b6(x)": 368,
371
- "BPb2b1(Q)": 369,
372
- "BPb3b2": 370,
373
- "BPb4b3": 371,
374
- "BPb4c3(x)": 372,
375
- "BPb5a4(x)": 373,
376
- "BPb5b4": 374,
377
- "BPb5c4(x)": 375,
378
- "BPb6a5(x)": 376,
379
- "BPb6b5": 377,
380
- "BPb6c5(x)": 378,
381
- "BPb7a6(x)": 379,
382
- "BPb7b5": 380,
383
- "BPb7b6": 381,
384
- "BPb7c6(x)": 382,
385
- "BPc3c2": 383,
386
- "BPc4b3(x)": 384,
387
- "BPc4c3": 385,
388
- "BPc4d3(x)": 386,
389
- "BPc5b4(x)": 387,
390
- "BPc5c4": 388,
391
- "BPc5d4(x)": 389,
392
- "BPc6b5(x)": 390,
393
- "BPc6c5": 391,
394
- "BPc6d5(x)": 392,
395
- "BPc7b6(x)": 393,
396
- "BPc7c5": 394,
397
- "BPc7c6": 395,
398
- "BPc7d6(x)": 396,
399
- "BPd3d2": 397,
400
- "BPd4c3(x)": 398,
401
- "BPd4d3": 399,
402
- "BPd4e3(x)": 400,
403
- "BPd5c4(x)": 401,
404
- "BPd5d4": 402,
405
- "BPd5e4(x)": 403,
406
- "BPd6c5(x)": 404,
407
- "BPd6d5": 405,
408
- "BPd6e5(x)": 406,
409
- "BPd7c6(x)": 407,
410
- "BPd7d5": 408,
411
- "BPd7d6": 409,
412
- "BPe3e2": 410,
413
- "BPe4d3(x)": 411,
414
- "BPe4e3": 412,
415
- "BPe4f3(x)": 413,
416
- "BPe5d4(x)": 414,
417
- "BPe5e4": 415,
418
- "BPe5f4(x)": 416,
419
- "BPe6d5(x)": 417,
420
- "BPe6e5": 418,
421
- "BPe6f5(x)": 419,
422
- "BPe7e5": 420,
423
- "BPe7e6": 421,
424
- "BPe7f6(x)": 422,
425
- "BPf3f2": 423,
426
- "BPf4f3": 424,
427
- "BPf4g3(x)": 425,
428
- "BPf5e4(x)": 426,
429
- "BPf5f4": 427,
430
- "BPf5g4(x)": 428,
431
- "BPf6e5(x)": 429,
432
- "BPf6f5": 430,
433
- "BPf6g5(x)": 431,
434
- "BPf7e6(x)": 432,
435
- "BPf7f5": 433,
436
- "BPf7f6": 434,
437
- "BPf7g6(x)": 435,
438
- "BPg3g2": 436,
439
- "BPg4f3(x)": 437,
440
- "BPg4g3": 438,
441
- "BPg5f4(x)": 439,
442
- "BPg5g4": 440,
443
- "BPg5h4(x)": 441,
444
- "BPg6f5(x)": 442,
445
- "BPg6g5": 443,
446
- "BPg6h5(x)": 444,
447
- "BPg7f6(x)": 445,
448
- "BPg7g5": 446,
449
- "BPg7g6": 447,
450
- "BPg7h6(x)": 448,
451
- "BPh2h1(Q)": 449,
452
- "BPh3h2": 450,
453
- "BPh4g3(x)": 451,
454
- "BPh4h3": 452,
455
- "BPh5g4(x)": 453,
456
- "BPh5h4": 454,
457
- "BPh6g5(x)": 455,
458
- "BPh6h5": 456,
459
- "BPh7g6(x)": 457,
460
- "BPh7h5": 458,
461
- "BPh7h6": 459,
462
- "BQa5b6": 460,
463
- "BQa5c7": 461,
464
- "BQb6b2(x)": 462,
465
- "BQb6c7": 463,
466
- "BQb6d8": 464,
467
- "BQc7b6": 465,
468
- "BQc7c6": 466,
469
- "BQc7d7": 467,
470
- "BQc7d8": 468,
471
- "BQc7e5(x)": 469,
472
- "BQc7e7": 470,
473
- "BQd5a5": 471,
474
- "BQd5d8": 472,
475
- "BQd6d7": 473,
476
- "BQd6e7": 474,
477
- "BQd7c6": 475,
478
- "BQd7c7": 476,
479
- "BQd7d6": 477,
480
- "BQd7e6": 478,
481
- "BQd7e7": 479,
482
- "BQd8a5": 480,
483
- "BQd8a5(+)": 481,
484
- "BQd8b6": 482,
485
- "BQd8c7": 483,
486
- "BQd8c8": 484,
487
- "BQd8d1(x+)": 485,
488
- "BQd8d4(x)": 486,
489
- "BQd8d5": 487,
490
- "BQd8d5(x)": 488,
491
- "BQd8d6": 489,
492
- "BQd8d6(x)": 490,
493
- "BQd8d7": 491,
494
- "BQd8d7(x)": 492,
495
- "BQd8e7": 493,
496
- "BQd8e7(x)": 494,
497
- "BQd8e8": 495,
498
- "BQd8f6": 496,
499
- "BQd8f6(x)": 497,
500
- "BQd8g5": 498,
501
- "BQd8g5(x)": 499,
502
- "BQd8h4": 500,
503
- "BQd8h4(+)": 501,
504
- "BQe7d6": 502,
505
- "BQe7d7": 503,
506
- "BQe7e6": 504,
507
- "BQe7f6": 505,
508
- "BQe7f7": 506,
509
- "BQe7g5": 507,
510
- "BQf6e7": 508,
511
- "BQf6g6": 509,
512
- "BRa8a7": 510,
513
- "BRa8b8": 511,
514
- "BRa8c8": 512,
515
- "BRa8d8": 513,
516
- "BRa8d8(x)": 514,
517
- "BRa8e8": 515,
518
- "BRa8e8(x)": 516,
519
- "BRa8f8": 517,
520
- "BRa8f8(x)": 518,
521
- "BRa8g8": 519,
522
- "BRb2a2(x)": 520,
523
- "BRb8a8": 521,
524
- "BRb8b2(x)": 522,
525
- "BRb8b6": 523,
526
- "BRb8b7": 524,
527
- "BRb8c8": 525,
528
- "BRb8d8": 526,
529
- "BRb8e8": 527,
530
- "BRc8a8": 528,
531
- "BRc8b8": 529,
532
- "BRc8c1(x)": 530,
533
- "BRc8c2": 531,
534
- "BRc8c2(x)": 532,
535
- "BRc8c3(x)": 533,
536
- "BRc8c4": 534,
537
- "BRc8c4(x)": 535,
538
- "BRc8c6": 536,
539
- "BRc8c6(x)": 537,
540
- "BRc8c7": 538,
541
- "BRc8d8": 539,
542
- "BRc8e8": 540,
543
- "BRc8f8": 541,
544
- "BRd8a8": 542,
545
- "BRd8b8": 543,
546
- "BRd8c8": 544,
547
- "BRd8d1(x)": 545,
548
- "BRd8d1(x+)": 546,
549
- "BRd8d2": 547,
550
- "BRd8d2(x)": 548,
551
- "BRd8d3": 549,
552
- "BRd8d3(x)": 550,
553
- "BRd8d4": 551,
554
- "BRd8d4(x)": 552,
555
- "BRd8d5": 553,
556
- "BRd8d5(x)": 554,
557
- "BRd8d6": 555,
558
- "BRd8d6(x)": 556,
559
- "BRd8d7": 557,
560
- "BRd8d7(x)": 558,
561
- "BRd8e8": 559,
562
- "BRd8f8": 560,
563
- "BRd8g8": 561,
564
- "BRe8b8": 562,
565
- "BRe8c8": 563,
566
- "BRe8d8": 564,
567
- "BRe8e1(x+)": 565,
568
- "BRe8e2": 566,
569
- "BRe8e3(x)": 567,
570
- "BRe8e4": 568,
571
- "BRe8e4(x)": 569,
572
- "BRe8e5": 570,
573
- "BRe8e5(x)": 571,
574
- "BRe8e6": 572,
575
- "BRe8e6(x)": 573,
576
- "BRe8e7": 574,
577
- "BRe8e7(x)": 575,
578
- "BRe8f8": 576,
579
- "BRe8g8": 577,
580
- "BRf8a8": 578,
581
- "BRf8b8": 579,
582
- "BRf8c8": 580,
583
- "BRf8d8": 581,
584
- "BRf8d8(x)": 582,
585
- "BRf8e8": 583,
586
- "BRf8e8(+)": 584,
587
- "BRf8f1(x+)": 585,
588
- "BRf8f3(x)": 586,
589
- "BRf8f4(x)": 587,
590
- "BRf8f5": 588,
591
- "BRf8f5(x)": 589,
592
- "BRf8f6": 590,
593
- "BRf8f6(x)": 591,
594
- "BRf8f7": 592,
595
- "BRf8f7(x)": 593,
596
- "BRf8g8": 594,
597
- "BRf8h8": 595,
598
- "BRg8g7": 596,
599
- "BRh8c8": 597,
600
- "BRh8d8": 598,
601
- "BRh8e8": 599,
602
- "BRh8f8": 600,
603
- "BRh8g8": 601,
604
- "BRh8h7": 602,
605
- "WBa4b3": 603,
606
- "WBb2a3": 604,
607
- "WBb2c1": 605,
608
- "WBb2c3": 606,
609
- "WBb2d4(x)": 607,
610
- "WBb2e5(x)": 608,
611
- "WBb2f6(x)": 609,
612
- "WBb2g7(x)": 610,
613
- "WBb3c2": 611,
614
- "WBb3d5(x)": 612,
615
- "WBb5a4": 613,
616
- "WBb5c4": 614,
617
- "WBb5c6(x)": 615,
618
- "WBb5c6(x+)": 616,
619
- "WBb5d3": 617,
620
- "WBb5d7(x+)": 618,
621
- "WBb5e2": 619,
622
- "WBc1a3": 620,
623
- "WBc1b2": 621,
624
- "WBc1d2": 622,
625
- "WBc1e3": 623,
626
- "WBc1e3(x)": 624,
627
- "WBc1f4": 625,
628
- "WBc1f4(x)": 626,
629
- "WBc1g5": 627,
630
- "WBc1g5(x)": 628,
631
- "WBc1h6": 629,
632
- "WBc1h6(x)": 630,
633
- "WBc2b3": 631,
634
- "WBc4a2": 632,
635
- "WBc4b3": 633,
636
- "WBc4b5": 634,
637
- "WBc4b5(+)": 635,
638
- "WBc4d3": 636,
639
- "WBc4d5": 637,
640
- "WBc4d5(x)": 638,
641
- "WBc4e2": 639,
642
- "WBc4e6(x)": 640,
643
- "WBc4f7(x+)": 641,
644
- "WBd2b4": 642,
645
- "WBd2b4(x)": 643,
646
- "WBd2c3": 644,
647
- "WBd2c3(x)": 645,
648
- "WBd2e1": 646,
649
- "WBd2e3": 647,
650
- "WBd2f4": 648,
651
- "WBd2g5": 649,
652
- "WBd3b1": 650,
653
- "WBd3b5": 651,
654
- "WBd3c2": 652,
655
- "WBd3c4": 653,
656
- "WBd3c4(x)": 654,
657
- "WBd3e2": 655,
658
- "WBd3e4": 656,
659
- "WBd3e4(x)": 657,
660
- "WBd3f5(x)": 658,
661
- "WBd3g6(x)": 659,
662
- "WBd3h7(x+)": 660,
663
- "WBd4e3": 661,
664
- "WBe2b5": 662,
665
- "WBe2c4": 663,
666
- "WBe2c4(x)": 664,
667
- "WBe2d3": 665,
668
- "WBe2f1": 666,
669
- "WBe2f3": 667,
670
- "WBe2f3(x)": 668,
671
- "WBe2g4": 669,
672
- "WBe2g4(x)": 670,
673
- "WBe3a7(x)": 671,
674
- "WBe3b6(x)": 672,
675
- "WBe3c5": 673,
676
- "WBe3c5(x)": 674,
677
- "WBe3d2": 675,
678
- "WBe3d4": 676,
679
- "WBe3d4(x)": 677,
680
- "WBe3f2": 678,
681
- "WBe3f4": 679,
682
- "WBe3f4(x)": 680,
683
- "WBe3g5": 681,
684
- "WBe3h6": 682,
685
- "WBe3h6(x)": 683,
686
- "WBe4d3": 684,
687
- "WBe5f6(x)": 685,
688
- "WBf1b5": 686,
689
- "WBf1b5(+)": 687,
690
- "WBf1c4": 688,
691
- "WBf1c4(x)": 689,
692
- "WBf1d3": 690,
693
- "WBf1e2": 691,
694
- "WBf1g2": 692,
695
- "WBf1h3": 693,
696
- "WBf3e2": 694,
697
- "WBf3e4(x)": 695,
698
- "WBf4c7(x)": 696,
699
- "WBf4d2": 697,
700
- "WBf4d6(x)": 698,
701
- "WBf4e3": 699,
702
- "WBf4e5": 700,
703
- "WBf4e5(x)": 701,
704
- "WBf4g3": 702,
705
- "WBf4g5": 703,
706
- "WBf4h2": 704,
707
- "WBg2b7(x)": 705,
708
- "WBg2d5(x)": 706,
709
- "WBg2e4(x)": 707,
710
- "WBg2f1": 708,
711
- "WBg2f3": 709,
712
- "WBg2f3(x)": 710,
713
- "WBg2h3": 711,
714
- "WBg3e5(x)": 712,
715
- "WBg5d2": 713,
716
- "WBg5d8(x)": 714,
717
- "WBg5e3": 715,
718
- "WBg5e7(x)": 716,
719
- "WBg5f4": 717,
720
- "WBg5f6(x)": 718,
721
- "WBg5h4": 719,
722
- "WBg5h6": 720,
723
- "WBh4g3": 721,
724
- "WBh6g7(x)": 722,
725
- "WKb1a1": 723,
726
- "WKb1a2": 724,
727
- "WKb1c1": 725,
728
- "WKb1c2": 726,
729
- "WKc1b1": 727,
730
- "WKc1b2": 728,
731
- "WKc1c2": 729,
732
- "WKc1d1": 730,
733
- "WKc1d2": 731,
734
- "WKc2b3": 732,
735
- "WKc2c3": 733,
736
- "WKc2d2": 734,
737
- "WKc2d3": 735,
738
- "WKc3b4": 736,
739
- "WKd1c1": 737,
740
- "WKd1c2": 738,
741
- "WKd1d2": 739,
742
- "WKd1e1": 740,
743
- "WKd1e2": 741,
744
- "WKd2c1": 742,
745
- "WKd2c2": 743,
746
- "WKd2c3": 744,
747
- "WKd2d3": 745,
748
- "WKd2e1": 746,
749
- "WKd2e2": 747,
750
- "WKd2e3": 748,
751
- "WKd3c2": 749,
752
- "WKd3c3": 750,
753
- "WKd3c4": 751,
754
- "WKd3d4": 752,
755
- "WKd3e2": 753,
756
- "WKd3e3": 754,
757
- "WKd3e4": 755,
758
- "WKd4c5": 756,
759
- "WKe1c1(O)": 757,
760
- "WKe1d1": 758,
761
- "WKe1d1(x)": 759,
762
- "WKe1d2": 760,
763
- "WKe1e2": 761,
764
- "WKe1f1": 762,
765
- "WKe1f2": 763,
766
- "WKe1f2(x)": 764,
767
- "WKe1g1(o)": 765,
768
- "WKe2d1": 766,
769
- "WKe2d2": 767,
770
- "WKe2d3": 768,
771
- "WKe2e1": 769,
772
- "WKe2e3": 770,
773
- "WKe2f1": 771,
774
- "WKe2f2": 772,
775
- "WKe2f3": 773,
776
- "WKe3d2": 774,
777
- "WKe3d3": 775,
778
- "WKe3d4": 776,
779
- "WKe3e2": 777,
780
- "WKe3e4": 778,
781
- "WKe3f2": 779,
782
- "WKe3f3": 780,
783
- "WKe3f4": 781,
784
- "WKe4d5": 782,
785
- "WKe4f5": 783,
786
- "WKf1e1": 784,
787
- "WKf1e2": 785,
788
- "WKf1f2": 786,
789
- "WKf1g1": 787,
790
- "WKf1g2": 788,
791
- "WKf2e1": 789,
792
- "WKf2e2": 790,
793
- "WKf2e3": 791,
794
- "WKf2f1": 792,
795
- "WKf2f3": 793,
796
- "WKf2g1": 794,
797
- "WKf2g2": 795,
798
- "WKf2g3": 796,
799
- "WKf3e2": 797,
800
- "WKf3e3": 798,
801
- "WKf3e4": 799,
802
- "WKf3f2": 800,
803
- "WKf3f4": 801,
804
- "WKf3g2": 802,
805
- "WKf3g3": 803,
806
- "WKf3g4": 804,
807
- "WKf4e3": 805,
808
- "WKf4e5": 806,
809
- "WKf4g3": 807,
810
- "WKf4g5": 808,
811
- "WKg1f1": 809,
812
- "WKg1f1(x)": 810,
813
- "WKg1f2": 811,
814
- "WKg1f2(x)": 812,
815
- "WKg1g2": 813,
816
- "WKg1g2(x)": 814,
817
- "WKg1h1": 815,
818
- "WKg1h2": 816,
819
- "WKg2f1": 817,
820
- "WKg2f2": 818,
821
- "WKg2f3": 819,
822
- "WKg2g1": 820,
823
- "WKg2g3": 821,
824
- "WKg2h1": 822,
825
- "WKg2h2": 823,
826
- "WKg2h3": 824,
827
- "WKg3f2": 825,
828
- "WKg3f3": 826,
829
- "WKg3f4": 827,
830
- "WKg3g2": 828,
831
- "WKg3g4": 829,
832
- "WKg3h2": 830,
833
- "WKg3h3": 831,
834
- "WKg3h4": 832,
835
- "WKg4f3": 833,
836
- "WKg4f5": 834,
837
- "WKg4g5": 835,
838
- "WKh1g1": 836,
839
- "WKh1g2": 837,
840
- "WKh1h2": 838,
841
- "WKh2g1": 839,
842
- "WKh2g2": 840,
843
- "WKh2g3": 841,
844
- "WKh2h1": 842,
845
- "WKh2h3": 843,
846
- "WKh3g2": 844,
847
- "WKh3g4": 845,
848
- "WKh3h4": 846,
849
- "WNa3b5": 847,
850
- "WNa3c2": 848,
851
- "WNa3c4": 849,
852
- "WNa4c3": 850,
853
- "WNa4c5": 851,
854
- "WNb1a3": 852,
855
- "WNb1c3": 853,
856
- "WNb1c3(x)": 854,
857
- "WNb1d2": 855,
858
- "WNb3c5": 856,
859
- "WNb3d2": 857,
860
- "WNb3d4": 858,
861
- "WNb5c3": 859,
862
- "WNb5d4": 860,
863
- "WNb5d6": 861,
864
- "WNc3a4": 862,
865
- "WNc3b1": 863,
866
- "WNc3b5": 864,
867
- "WNc3b5(x)": 865,
868
- "WNc3d1": 866,
869
- "WNc3d5": 867,
870
- "WNc3d5(x)": 868,
871
- "WNc3e2": 869,
872
- "WNc3e4": 870,
873
- "WNc3e4(x)": 871,
874
- "WNc4e3": 872,
875
- "WNc4e5": 873,
876
- "WNc7a8(x)": 874,
877
- "WNd2b3": 875,
878
- "WNd2c4": 876,
879
- "WNd2c4(x)": 877,
880
- "WNd2e4": 878,
881
- "WNd2e4(x)": 879,
882
- "WNd2f1": 880,
883
- "WNd2f3": 881,
884
- "WNd2f3(x)": 882,
885
- "WNd4b3": 883,
886
- "WNd4b5": 884,
887
- "WNd4c6": 885,
888
- "WNd4c6(x)": 886,
889
- "WNd4e6": 887,
890
- "WNd4e6(x)": 888,
891
- "WNd4f3": 889,
892
- "WNd4f5": 890,
893
- "WNd5c3": 891,
894
- "WNd5e3": 892,
895
- "WNd5f4": 893,
896
- "WNd5f6(x+)": 894,
897
- "WNe2c3": 895,
898
- "WNe2d4": 896,
899
- "WNe2d4(x)": 897,
900
- "WNe2f4": 898,
901
- "WNe2f4(x)": 899,
902
- "WNe2g3": 900,
903
- "WNe4c3": 901,
904
- "WNe4c5": 902,
905
- "WNe4c5(x)": 903,
906
- "WNe4d6": 904,
907
- "WNe4d6(x)": 905,
908
- "WNe4f6(+)": 906,
909
- "WNe4f6(x+)": 907,
910
- "WNe4g3": 908,
911
- "WNe4g5": 909,
912
- "WNe5c4": 910,
913
- "WNe5c6": 911,
914
- "WNe5c6(x)": 912,
915
- "WNe5d3": 913,
916
- "WNe5d7": 914,
917
- "WNe5d7(x)": 915,
918
- "WNe5f3": 916,
919
- "WNe5f7(x)": 917,
920
- "WNe5g4": 918,
921
- "WNe5g6(x)": 919,
922
- "WNf1g3": 920,
923
- "WNf3d2": 921,
924
- "WNf3d4": 922,
925
- "WNf3d4(x)": 923,
926
- "WNf3e1": 924,
927
- "WNf3e5": 925,
928
- "WNf3e5(x)": 926,
929
- "WNf3g1": 927,
930
- "WNf3g5": 928,
931
- "WNf3g5(+)": 929,
932
- "WNf3g5(x)": 930,
933
- "WNf3h2": 931,
934
- "WNf3h4": 932,
935
- "WNf3h4(x)": 933,
936
- "WNf7h8(x)": 934,
937
- "WNg1e2": 935,
938
- "WNg1f3": 936,
939
- "WNg1h3": 937,
940
- "WNg3e2": 938,
941
- "WNg3e4": 939,
942
- "WNg3f5": 940,
943
- "WNg3h5": 941,
944
- "WNg5e4": 942,
945
- "WNg5e4(x)": 943,
946
- "WNg5e6": 944,
947
- "WNg5e6(x)": 945,
948
- "WNg5f3": 946,
949
- "WNg5f7(x)": 947,
950
- "WNg5h3": 948,
951
- "WNh2f3": 949,
952
- "WNh2g4": 950,
953
- "WNh3f4": 951,
954
- "WNh4f3": 952,
955
- "WNh4f5": 953,
956
- "WNh4g6(x)": 954,
957
- "WPa2a3": 955,
958
- "WPa2a4": 956,
959
- "WPa2b3(x)": 957,
960
- "WPa3a4": 958,
961
- "WPa3b4(x)": 959,
962
- "WPa4a5": 960,
963
- "WPa4b5(x)": 961,
964
- "WPa5a6": 962,
965
- "WPa5b6(x)": 963,
966
- "WPa6a7": 964,
967
- "WPa7a8(Q)": 965,
968
- "WPb2a3(x)": 966,
969
- "WPb2b3": 967,
970
- "WPb2b4": 968,
971
- "WPb2c3(x)": 969,
972
- "WPb3a4(x)": 970,
973
- "WPb3b4": 971,
974
- "WPb3c4(x)": 972,
975
- "WPb4a5(x)": 973,
976
- "WPb4b5": 974,
977
- "WPb4c5(x)": 975,
978
- "WPb5b6": 976,
979
- "WPb5c6(x)": 977,
980
- "WPb6b7": 978,
981
- "WPb7b8(Q)": 979,
982
- "WPc2b3(x)": 980,
983
- "WPc2c3": 981,
984
- "WPc2c4": 982,
985
- "WPc2d3(x)": 983,
986
- "WPc3b4(x)": 984,
987
- "WPc3c4": 985,
988
- "WPc3d4(x)": 986,
989
- "WPc4b5(x)": 987,
990
- "WPc4c5": 988,
991
- "WPc4d5(x)": 989,
992
- "WPc5b6(x)": 990,
993
- "WPc5c6": 991,
994
- "WPc5d6(x)": 992,
995
- "WPc6c7": 993,
996
- "WPd2d3": 994,
997
- "WPd2d4": 995,
998
- "WPd3c4(x)": 996,
999
- "WPd3d4": 997,
1000
- "WPd3e4(x)": 998,
1001
- "WPd4c5(x)": 999,
1002
- "WPd4d5": 1000,
1003
- "WPd4e5(x)": 1001,
1004
- "WPd5c6(x)": 1002,
1005
- "WPd5d6": 1003,
1006
- "WPd5e6(x)": 1004,
1007
- "WPd6d7": 1005,
1008
- "WPe2e3": 1006,
1009
- "WPe2e4": 1007,
1010
- "WPe3d4(x)": 1008,
1011
- "WPe3e4": 1009,
1012
- "WPe3f4(x)": 1010,
1013
- "WPe4d5(x)": 1011,
1014
- "WPe4e5": 1012,
1015
- "WPe4f5(x)": 1013,
1016
- "WPe5d6(x)": 1014,
1017
- "WPe5e6": 1015,
1018
- "WPe5f6(x)": 1016,
1019
- "WPe6e7": 1017,
1020
- "WPf2e3(x)": 1018,
1021
- "WPf2f3": 1019,
1022
- "WPf2f4": 1020,
1023
- "WPf2g3(x)": 1021,
1024
- "WPf3e4(x)": 1022,
1025
- "WPf3f4": 1023,
1026
- "WPf3g4(x)": 1024,
1027
- "WPf4e5(x)": 1025,
1028
- "WPf4f5": 1026,
1029
- "WPf4g5(x)": 1027,
1030
- "WPf5e6(x)": 1028,
1031
- "WPf5f6": 1029,
1032
- "WPf5g6(x)": 1030,
1033
- "WPf6f7": 1031,
1034
- "WPg2f3(x)": 1032,
1035
- "WPg2g3": 1033,
1036
- "WPg2g4": 1034,
1037
- "WPg2h3(x)": 1035,
1038
- "WPg3f4(x)": 1036,
1039
- "WPg3g4": 1037,
1040
- "WPg3h4(x)": 1038,
1041
- "WPg4f5(x)": 1039,
1042
- "WPg4g5": 1040,
1043
- "WPg4h5(x)": 1041,
1044
- "WPg5f6(x)": 1042,
1045
- "WPg5g6": 1043,
1046
- "WPg5h6(x)": 1044,
1047
- "WPg6g7": 1045,
1048
- "WPh2g3(x)": 1046,
1049
- "WPh2h3": 1047,
1050
- "WPh2h4": 1048,
1051
- "WPh3g4(x)": 1049,
1052
- "WPh3h4": 1050,
1053
- "WPh4g5(x)": 1051,
1054
- "WPh4h5": 1052,
1055
- "WPh5g6(x)": 1053,
1056
- "WPh5h6": 1054,
1057
- "WPh6h7": 1055,
1058
- "WPh7h8(Q)": 1056,
1059
- "WQa4b3": 1057,
1060
- "WQb3b7(x)": 1058,
1061
- "WQb3c2": 1059,
1062
- "WQc2b3": 1060,
1063
- "WQc2d2": 1061,
1064
- "WQc2d3": 1062,
1065
- "WQc2e2": 1063,
1066
- "WQc2e4(x)": 1064,
1067
- "WQd1a4": 1065,
1068
- "WQd1a4(+)": 1066,
1069
- "WQd1b3": 1067,
1070
- "WQd1c1": 1068,
1071
- "WQd1c2": 1069,
1072
- "WQd1d2": 1070,
1073
- "WQd1d2(x)": 1071,
1074
- "WQd1d3": 1072,
1075
- "WQd1d3(x)": 1073,
1076
- "WQd1d4": 1074,
1077
- "WQd1d4(x)": 1075,
1078
- "WQd1d5(x)": 1076,
1079
- "WQd1d8(x+)": 1077,
1080
- "WQd1e1": 1078,
1081
- "WQd1e2": 1079,
1082
- "WQd1e2(+)": 1080,
1083
- "WQd1e2(x)": 1081,
1084
- "WQd1f3": 1082,
1085
- "WQd1f3(x)": 1083,
1086
- "WQd1g4": 1084,
1087
- "WQd1g4(x)": 1085,
1088
- "WQd1h5": 1086,
1089
- "WQd1h5(+)": 1087,
1090
- "WQd2c2": 1088,
1091
- "WQd2c3": 1089,
1092
- "WQd2d3": 1090,
1093
- "WQd2e2": 1091,
1094
- "WQd2e3": 1092,
1095
- "WQd2f4": 1093,
1096
- "WQd3d2": 1094,
1097
- "WQd3e2": 1095,
1098
- "WQd3e3": 1096,
1099
- "WQd4d1": 1097,
1100
- "WQd4e3": 1098,
1101
- "WQe2d2": 1099,
1102
- "WQe2d3": 1100,
1103
- "WQe2e3": 1101,
1104
- "WQe2e4(x)": 1102,
1105
- "WQe2f2": 1103,
1106
- "WQe2f3": 1104,
1107
- "WQe2g4": 1105,
1108
- "WQf3d1": 1106,
1109
- "WQf3e2": 1107,
1110
- "WQf3e3": 1108,
1111
- "WQf3f4": 1109,
1112
- "WQf3f6(x)": 1110,
1113
- "WQf3g3": 1111,
1114
- "WQh5f3": 1112,
1115
- "WRa1a2": 1113,
1116
- "WRa1b1": 1114,
1117
- "WRa1c1": 1115,
1118
- "WRa1d1": 1116,
1119
- "WRa1d1(x)": 1117,
1120
- "WRa1e1": 1118,
1121
- "WRa1e1(x)": 1119,
1122
- "WRa1f1": 1120,
1123
- "WRa1f1(x)": 1121,
1124
- "WRa1g1": 1122,
1125
- "WRb1a1": 1123,
1126
- "WRb1b3": 1124,
1127
- "WRb1b7(x)": 1125,
1128
- "WRb1c1": 1126,
1129
- "WRb1d1": 1127,
1130
- "WRb1e1": 1128,
1131
- "WRc1a1": 1129,
1132
- "WRc1b1": 1130,
1133
- "WRc1c2": 1131,
1134
- "WRc1c3": 1132,
1135
- "WRc1c6(x)": 1133,
1136
- "WRc1c7": 1134,
1137
- "WRc1d1": 1135,
1138
- "WRc1e1": 1136,
1139
- "WRc1f1": 1137,
1140
- "WRd1a1": 1138,
1141
- "WRd1b1": 1139,
1142
- "WRd1c1": 1140,
1143
- "WRd1d2": 1141,
1144
- "WRd1d2(x)": 1142,
1145
- "WRd1d3": 1143,
1146
- "WRd1d3(x)": 1144,
1147
- "WRd1d4": 1145,
1148
- "WRd1d4(x)": 1146,
1149
- "WRd1d5": 1147,
1150
- "WRd1d5(x)": 1148,
1151
- "WRd1d6": 1149,
1152
- "WRd1d6(x)": 1150,
1153
- "WRd1d7": 1151,
1154
- "WRd1d7(x)": 1152,
1155
- "WRd1d8(x)": 1153,
1156
- "WRd1d8(x+)": 1154,
1157
- "WRd1e1": 1155,
1158
- "WRd1f1": 1156,
1159
- "WRd1g1": 1157,
1160
- "WRd1h1": 1158,
1161
- "WRe1b1": 1159,
1162
- "WRe1c1": 1160,
1163
- "WRe1d1": 1161,
1164
- "WRe1d1(x)": 1162,
1165
- "WRe1e2": 1163,
1166
- "WRe1e2(x)": 1164,
1167
- "WRe1e3": 1165,
1168
- "WRe1e3(x)": 1166,
1169
- "WRe1e4": 1167,
1170
- "WRe1e4(x)": 1168,
1171
- "WRe1e5": 1169,
1172
- "WRe1e5(x)": 1170,
1173
- "WRe1e6(x)": 1171,
1174
- "WRe1e7": 1172,
1175
- "WRe1e7(x)": 1173,
1176
- "WRe1e8(x)": 1174,
1177
- "WRe1e8(x+)": 1175,
1178
- "WRe1f1": 1176,
1179
- "WRe1g1": 1177,
1180
- "WRf1a1": 1178,
1181
- "WRf1b1": 1179,
1182
- "WRf1c1": 1180,
1183
- "WRf1d1": 1181,
1184
- "WRf1d1(x)": 1182,
1185
- "WRf1e1": 1183,
1186
- "WRf1e1(+)": 1184,
1187
- "WRf1f2": 1185,
1188
- "WRf1f2(x)": 1186,
1189
- "WRf1f3": 1187,
1190
- "WRf1f3(x)": 1188,
1191
- "WRf1f4": 1189,
1192
- "WRf1f4(x)": 1190,
1193
- "WRf1f5(x)": 1191,
1194
- "WRf1f6(x)": 1192,
1195
- "WRf1f8(x+)": 1193,
1196
- "WRf1g1": 1194,
1197
- "WRf1h1": 1195,
1198
- "WRf3g3": 1196,
1199
- "WRg1g2": 1197,
1200
- "WRh1c1": 1198,
1201
- "WRh1d1": 1199,
1202
- "WRh1e1": 1200,
1203
- "WRh1f1": 1201,
1204
- "WRh1g1": 1202,
1205
- "WRh1h2": 1203
1206
  }
 
3
  "[BOS]": 1,
4
  "[EOS]": 2,
5
  "[UNK]": 3,
6
+ "WP": 4,
7
+ "WN": 5,
8
+ "WB": 6,
9
+ "WR": 7,
10
+ "WQ": 8,
11
+ "WK": 9,
12
+ "BP": 10,
13
+ "BN": 11,
14
+ "BB": 12,
15
+ "BR": 13,
16
+ "BQ": 14,
17
+ "BK": 15,
18
+ "a1": 16,
19
+ "a2": 17,
20
+ "a3": 18,
21
+ "a4": 19,
22
+ "a5": 20,
23
+ "a6": 21,
24
+ "a7": 22,
25
+ "a8": 23,
26
+ "b1": 24,
27
+ "b2": 25,
28
+ "b3": 26,
29
+ "b4": 27,
30
+ "b5": 28,
31
+ "b6": 29,
32
+ "b7": 30,
33
+ "b8": 31,
34
+ "c1": 32,
35
+ "c2": 33,
36
+ "c3": 34,
37
+ "c4": 35,
38
+ "c5": 36,
39
+ "c6": 37,
40
+ "c7": 38,
41
+ "c8": 39,
42
+ "d1": 40,
43
+ "d2": 41,
44
+ "d3": 42,
45
+ "d4": 43,
46
+ "d5": 44,
47
+ "d6": 45,
48
+ "d7": 46,
49
+ "d8": 47,
50
+ "e1": 48,
51
+ "e2": 49,
52
+ "e3": 50,
53
+ "e4": 51,
54
+ "e5": 52,
55
+ "e6": 53,
56
+ "e7": 54,
57
+ "e8": 55,
58
+ "f1": 56,
59
+ "f2": 57,
60
+ "f3": 58,
61
+ "f4": 59,
62
+ "f5": 60,
63
+ "f6": 61,
64
+ "f7": 62,
65
+ "f8": 63,
66
+ "g1": 64,
67
+ "g2": 65,
68
+ "g3": 66,
69
+ "g4": 67,
70
+ "g5": 68,
71
+ "g6": 69,
72
+ "g7": 70,
73
+ "g8": 71,
74
+ "h1": 72,
75
+ "h2": 73,
76
+ "h3": 74,
77
+ "h4": 75,
78
+ "h5": 76,
79
+ "h6": 77,
80
+ "h7": 78,
81
+ "h8": 79,
82
+ "(x)": 80,
83
+ "(+)": 81,
84
+ "(+*)": 82,
85
+ "(o)": 83,
86
+ "(O)": 84,
87
+ "=": 85,
88
+ "=Q": 86,
89
+ "=R": 87,
90
+ "=B": 88,
91
+ "=N": 89
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
92
  }