MDaytek committed (verified)
Commit 29536e2 · 1 parent: f591dec

Nuclear Option Submission

README.md ADDED
@@ -0,0 +1,9 @@
+ ---
+ library_name: transformers
+ tags:
+ - chess-challenge
+ - chess
+ license: mit
+ ---
+ # Sanity Check Zero-Dependency
+ Generated from scratch without local files.
config.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "activation_function": "gelu_new",
+   "architectures": [
+     "GPT2LMHeadModel"
+   ],
+   "attn_pdrop": 0.1,
+   "bos_token_id": 1,
+   "dtype": "float32",
+   "embd_pdrop": 0.1,
+   "eos_token_id": 2,
+   "initializer_range": 0.02,
+   "layer_norm_epsilon": 1e-05,
+   "model_type": "gpt2",
+   "n_embd": 32,
+   "n_head": 2,
+   "n_inner": null,
+   "n_layer": 1,
+   "n_positions": 256,
+   "reorder_and_upcast_attn": false,
+   "resid_pdrop": 0.1,
+   "scale_attn_by_inverse_layer_idx": false,
+   "scale_attn_weights": true,
+   "summary_activation": null,
+   "summary_first_dropout": 0.1,
+   "summary_proj_to_labels": true,
+   "summary_type": "cls_index",
+   "summary_use_proj": true,
+   "transformers_version": "4.57.3",
+   "use_cache": true,
+   "vocab_size": 8
+ }
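The config describes a deliberately tiny GPT-2: one layer, two heads, a 32-dimensional embedding, a 256-token context, and an 8-token vocabulary. A quick local sketch (not part of the commit) rebuilds the architecture from this file and checks its size:

```python
# Sketch: rebuild the tiny GPT-2 described by config.json and count its parameters.
# Assumes a local clone of this repo with config.json in the working directory.
from transformers import GPT2Config, GPT2LMHeadModel

config = GPT2Config.from_json_file("config.json")
model = GPT2LMHeadModel(config)

# n_layer=1, n_embd=32, n_positions=256, vocab_size=8 gives roughly 21k float32
# parameters (~85 KB), consistent with the ~86 KB model.safetensors added below.
print(sum(p.numel() for p in model.parameters()))
```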
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "transformers_version": "4.57.3"
+ }
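The generation defaults simply mirror the bos/eos ids from config.json. They can be inspected on their own (a sketch against a local clone of the repo):

```python
# Sketch: read generation_config.json from a local clone of this repo.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained(".")
print(gen_config.bos_token_id, gen_config.eos_token_id)  # 1 2
```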
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:52803f6c1157517241e17dfda3fdc94cd2b6e08c726d78c31fc24782931ec8f2
+ size 86344
special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "bos_token": "[BOS]",
+   "eos_token": "[EOS]",
+   "pad_token": "[PAD]",
+   "unk_token": "[UNK]"
+ }
tokenizer.json ADDED
@@ -0,0 +1 @@
+ {"version": "1.0", "model": {"type": "BPE", "vocab": {"[PAD]": 0, "[BOS]": 1, "[EOS]": 2, "[UNK]": 3, "e2e4": 4, "e7e5": 5, "g1f3": 6, "b8c6": 7}, "merges": []}}
tokenizer.py ADDED
@@ -0,0 +1,10 @@
+
+ from transformers import PreTrainedTokenizerFast
+
+ class ChessTokenizer(PreTrainedTokenizerFast):
+     def __init__(self, tokenizer_file=None, **kwargs):
+         super().__init__(tokenizer_file=tokenizer_file, **kwargs)
+         self.pad_token = "[PAD]"
+         self.bos_token = "[BOS]"
+         self.eos_token = "[EOS]"
+         self.unk_token = "[UNK]"
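ChessTokenizer is a thin wrapper over PreTrainedTokenizerFast that pins the four special tokens. Assuming the minimal tokenizer.json above deserializes cleanly, it can be exercised directly from a local clone (a sketch, not from the commit):

```python
# Sketch: instantiate the custom tokenizer from the shipped tokenizer.json.
# Run from the repo root so tokenizer.py is importable.
from tokenizer import ChessTokenizer

tok = ChessTokenizer(tokenizer_file="tokenizer.json")

# Whole moves are vocabulary entries, so id lookup is a direct table hit.
print(tok.convert_tokens_to_ids(["[BOS]", "e2e4", "e7e5", "[EOS]"]))  # [1, 4, 5, 2]
print(tok.pad_token_id)  # 0
```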
tokenizer_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+   "tokenizer_class": "ChessTokenizer",
+   "auto_map": {
+     "AutoTokenizer": [
+       "tokenizer.ChessTokenizer",
+       null
+     ]
+   },
+   "bos_token": "[BOS]",
+   "eos_token": "[EOS]",
+   "pad_token": "[PAD]",
+   "unk_token": "[UNK]"
+ }
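The auto_map entry points AutoTokenizer at tokenizer.ChessTokenizer, so loading requires trust_remote_code=True. An end-to-end sketch against a local clone follows; the Hub repo id is not shown in this diff, so a local path is assumed, and with a sanity-check checkpoint this small the continuation is not expected to be meaningful chess.

```python
# Sketch: load tokenizer + model from a local clone of this repo and sample a continuation.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(".", trust_remote_code=True)  # resolves tokenizer.ChessTokenizer via auto_map
model = AutoModelForCausalLM.from_pretrained(".")

# Build the prompt from whole-move tokens; the minimal BPE file has no pre-tokenizer,
# so ids are assembled via direct vocabulary lookup rather than tokenizer(text).
input_ids = torch.tensor([tokenizer.convert_tokens_to_ids(["[BOS]", "e2e4", "e7e5"])])

out = model.generate(
    input_ids,
    max_new_tokens=4,
    do_sample=False,
    pad_token_id=tokenizer.pad_token_id,
)
print(tokenizer.convert_ids_to_tokens(out[0].tolist()))
```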