Yova committed
Commit f89042f · 1 Parent(s): 9a7de17

Training in progress, step 400

Files changed (5)
  1. config.json +1 -1
  2. model.safetensors +1 -1
  3. tokenizer_config.json +6 -5
  4. training_args.bin +1 -1
  5. vocab.json +1 -1
config.json CHANGED
@@ -6,7 +6,7 @@
   "d_ff": 1024,
   "d_kv": 64,
   "d_model": 256,
-  "decoder_start_token_id": 59,
+  "decoder_start_token_id": 60,
   "dense_act_fn": "relu",
   "dropout_rate": 0.3,
   "eos_token_id": 1,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:883b7e7c461b219fb77794521af55e394c89b590881b488ce776806ee1ac45c0
+oid sha256:68129fc852214b3af55abe1a13b03a1552399314b66c949616255d7cc60e0359
 size 29458520
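The file itself is stored in Git LFS; only the pointer (oid and size) changes here, and the oid is the SHA-256 of the binary. A small sketch, assuming a locally downloaded copy of model.safetensors, for checking it against the pointer above:

import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    # Hash the file in chunks so large checkpoints do not need to fit in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "68129fc852214b3af55abe1a13b03a1552399314b66c949616255d7cc60e0359"
assert sha256_of("model.safetensors") == expected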
tokenizer_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "added_tokens_decoder": {
     "59": {
-      "content": "<s>",
+      "content": "<pad>",
       "lstrip": true,
       "normalized": true,
       "rstrip": true,
@@ -9,7 +9,7 @@
       "special": true
     },
     "60": {
-      "content": "</s>",
+      "content": "<s>",
       "lstrip": true,
       "normalized": true,
       "rstrip": true,
@@ -17,7 +17,7 @@
       "special": true
     },
     "61": {
-      "content": "<unk>",
+      "content": "</s>",
       "lstrip": true,
       "normalized": true,
       "rstrip": true,
@@ -25,7 +25,7 @@
       "special": true
     },
     "62": {
-      "content": "<pad>",
+      "content": "<unk>",
       "lstrip": true,
       "normalized": true,
       "rstrip": true,
@@ -33,6 +33,7 @@
       "special": true
     }
   },
+  "additional_special_tokens": [],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
@@ -40,6 +41,6 @@
   "max_len": 128,
   "model_max_length": 128,
   "pad_token": "<pad>",
-  "tokenizer_class": "CustomTokenizer",
+  "tokenizer_class": "MyByT5Tokenizer",
   "unk_token": "<unk>"
 }
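The four special tokens keep their id slots (59-62) but swap content: <pad> now sits at 59, <s> at 60, </s> at 61, and <unk> at 62, and the tokenizer class is renamed to MyByT5Tokenizer. A short sketch, assuming the updated tokenizer_config.json is available locally, that resolves each named special token to its new id:

import json

with open("tokenizer_config.json") as f:
    tok_config = json.load(f)

content_to_id = {t["content"]: int(i) for i, t in tok_config["added_tokens_decoder"].items()}
for name in ("pad_token", "bos_token", "eos_token", "unk_token"):
    token = tok_config[name]
    print(name, token, content_to_id[token])
# expected after this commit:
# pad_token <pad> 59, bos_token <s> 60, eos_token </s> 61, unk_token <unk> 62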
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c9c38a2116d31fbef5d13bab683bc2ab1a3f7523a477c223845c7856898a5db0
+oid sha256:27b1bf882e54c58f9638ee425d1a3b4d3f044a5e9ebd189884577cb3c2f4f23c
 size 4728
vocab.json CHANGED
@@ -1 +1 @@
-{"P": 0, "\u00e5": 1, "w": 2, "N": 3, "E": 4, "9": 5, "s": 6, "m": 7, "C": 8, "c": 9, "v": 10, "D": 11, "+": 12, "(": 13, "l": 14, "V": 15, "i": 16, "'": 17, "n": 18, "b": 19, "q": 20, "j": 21, " ": 22, "e": 23, "L": 24, ")": 25, "3": 26, "r": 27, "a": 28, "o": 29, "I": 30, "\u00f8": 31, "y": 32, ";": 33, "M": 34, "F": 35, "O": 36, "J": 37, "t": 38, "\u00e9": 39, "f": 40, ".": 41, "h": 42, "S": 43, "p": 44, "x": 45, "-": 46, "g": 47, "z": 48, "6": 49, "k": 50, "d": 51, "T": 52, "u": 53, "A": 54, "U": 55, "R": 56, "\u00e6": 57, "G": 58, "<s>": 59, "</s>": 60, "<unk>": 61, "<pad>": 62}
+{"S": 0, "6": 1, "y": 2, "\u00e6": 3, "I": 4, "J": 5, "e": 6, ".": 7, "c": 8, "R": 9, ";": 10, "r": 11, "N": 12, "p": 13, "j": 14, "G": 15, "\u00e5": 16, " ": 17, "U": 18, "'": 19, "9": 20, "-": 21, "+": 22, "M": 23, "f": 24, "q": 25, "t": 26, "h": 27, "P": 28, ")": 29, "o": 30, "A": 31, "D": 32, "F": 33, "\u00e9": 34, "v": 35, "a": 36, "m": 37, "g": 38, "z": 39, "E": 40, "w": 41, "x": 42, "n": 43, "3": 44, "O": 45, "T": 46, "k": 47, "l": 48, "i": 49, "s": 50, "u": 51, "(": 52, "L": 53, "b": 54, "V": 55, "d": 56, "C": 57, "\u00f8": 58}