Yova committed on
Commit
7d2ff79
·
1 Parent(s): 9628297

Training in progress, step 400

Browse files
config.json CHANGED
@@ -3,9 +3,9 @@
3
  "T5ForConditionalGeneration"
4
  ],
5
  "classifier_dropout": 0.0,
6
- "d_ff": 256,
7
  "d_kv": 64,
8
- "d_model": 32,
9
  "decoder_start_token_id": 55,
10
  "dense_act_fn": "relu",
11
  "dropout_rate": 0.3,
@@ -16,9 +16,9 @@
16
  "is_gated_act": false,
17
  "layer_norm_epsilon": 1e-06,
18
  "model_type": "t5",
19
- "num_decoder_layers": 2,
20
- "num_heads": 2,
21
- "num_layers": 2,
22
  "pad_token_id": 0,
23
  "relative_attention_max_distance": 128,
24
  "relative_attention_num_buckets": 32,
 
3
  "T5ForConditionalGeneration"
4
  ],
5
  "classifier_dropout": 0.0,
6
+ "d_ff": 1024,
7
  "d_kv": 64,
8
+ "d_model": 256,
9
  "decoder_start_token_id": 55,
10
  "dense_act_fn": "relu",
11
  "dropout_rate": 0.3,
 
16
  "is_gated_act": false,
17
  "layer_norm_epsilon": 1e-06,
18
  "model_type": "t5",
19
+ "num_decoder_layers": 4,
20
+ "num_heads": 4,
21
+ "num_layers": 4,
22
  "pad_token_id": 0,
23
  "relative_attention_max_distance": 128,
24
  "relative_attention_num_buckets": 32,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5102792e883d5acfe335d519ae51a513811effd16b8f7759623f405968de6cd3
3
- size 670200
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1a42de00ceb02250f18280babab04d47636a91dbff283d1a1e4b6710c5412d5d
3
+ size 29454424
runs/Nov28_12-30-09_d7c5ae20caf1/events.out.tfevents.1701174618.d7c5ae20caf1.3157.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3771f0759d4cddab4b306df8ceb17fba3ea5f1d04535f99615c8123071016863
3
+ size 4791
special_tokens_map.json CHANGED
@@ -1,30 +1,30 @@
1
  {
2
  "bos_token": {
3
  "content": "<s>",
4
- "lstrip": false,
5
- "normalized": false,
6
- "rstrip": false,
7
  "single_word": false
8
  },
9
  "eos_token": {
10
  "content": "</s>",
11
- "lstrip": false,
12
- "normalized": false,
13
- "rstrip": false,
14
  "single_word": false
15
  },
16
  "pad_token": {
17
  "content": "<pad>",
18
- "lstrip": false,
19
- "normalized": false,
20
- "rstrip": false,
21
  "single_word": false
22
  },
23
  "unk_token": {
24
  "content": "<unk>",
25
- "lstrip": false,
26
- "normalized": false,
27
- "rstrip": false,
28
  "single_word": false
29
  }
30
  }
 
1
  {
2
  "bos_token": {
3
  "content": "<s>",
4
+ "lstrip": true,
5
+ "normalized": true,
6
+ "rstrip": true,
7
  "single_word": false
8
  },
9
  "eos_token": {
10
  "content": "</s>",
11
+ "lstrip": true,
12
+ "normalized": true,
13
+ "rstrip": true,
14
  "single_word": false
15
  },
16
  "pad_token": {
17
  "content": "<pad>",
18
+ "lstrip": true,
19
+ "normalized": true,
20
+ "rstrip": true,
21
  "single_word": false
22
  },
23
  "unk_token": {
24
  "content": "<unk>",
25
+ "lstrip": true,
26
+ "normalized": true,
27
+ "rstrip": true,
28
  "single_word": false
29
  }
30
  }
tokenizer_config.json CHANGED
@@ -2,33 +2,33 @@
2
  "added_tokens_decoder": {
3
  "55": {
4
  "content": "<s>",
5
- "lstrip": false,
6
- "normalized": false,
7
- "rstrip": false,
8
  "single_word": false,
9
  "special": true
10
  },
11
  "56": {
12
  "content": "</s>",
13
- "lstrip": false,
14
- "normalized": false,
15
- "rstrip": false,
16
  "single_word": false,
17
  "special": true
18
  },
19
  "57": {
20
  "content": "<unk>",
21
- "lstrip": false,
22
- "normalized": false,
23
- "rstrip": false,
24
  "single_word": false,
25
  "special": true
26
  },
27
  "58": {
28
  "content": "<pad>",
29
- "lstrip": false,
30
- "normalized": false,
31
- "rstrip": false,
32
  "single_word": false,
33
  "special": true
34
  }
@@ -36,9 +36,10 @@
36
  "bos_token": "<s>",
37
  "clean_up_tokenization_spaces": true,
38
  "eos_token": "</s>",
 
39
  "max_len": 128,
40
  "model_max_length": 128,
41
  "pad_token": "<pad>",
42
- "tokenizer_class": "FixedVocabTokenizer",
43
  "unk_token": "<unk>"
44
  }
 
2
  "added_tokens_decoder": {
3
  "55": {
4
  "content": "<s>",
5
+ "lstrip": true,
6
+ "normalized": true,
7
+ "rstrip": true,
8
  "single_word": false,
9
  "special": true
10
  },
11
  "56": {
12
  "content": "</s>",
13
+ "lstrip": true,
14
+ "normalized": true,
15
+ "rstrip": true,
16
  "single_word": false,
17
  "special": true
18
  },
19
  "57": {
20
  "content": "<unk>",
21
+ "lstrip": true,
22
+ "normalized": true,
23
+ "rstrip": true,
24
  "single_word": false,
25
  "special": true
26
  },
27
  "58": {
28
  "content": "<pad>",
29
+ "lstrip": true,
30
+ "normalized": true,
31
+ "rstrip": true,
32
  "single_word": false,
33
  "special": true
34
  }
 
36
  "bos_token": "<s>",
37
  "clean_up_tokenization_spaces": true,
38
  "eos_token": "</s>",
39
+ "extra_ids": 0,
40
  "max_len": 128,
41
  "model_max_length": 128,
42
  "pad_token": "<pad>",
43
+ "tokenizer_class": "CustomTokenizer",
44
  "unk_token": "<unk>"
45
  }
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:629b4114042edbf424ed66b9cafd910a1b227379ef25384e2b86246ccc3fa275
3
- size 4728
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d5c85c88665e8405b7ec073e5f6c58b51d66afe10e3ed8bf4f7f3cf841348dd8
3
+ size 4792
vocab.json CHANGED
@@ -1 +1 @@
1
- {"F": 0, "G": 1, ";": 2, "\u00ed": 3, "y": 4, " ": 5, "V": 6, "L": 7, "j": 8, "N": 9, "\u00e9": 10, "l": 11, "n": 12, "c": 13, "M": 14, "3": 15, "4": 16, "I": 17, "\u0142": 18, "w": 19, "\u01eb": 20, "a": 21, "s": 22, "R": 23, "\u0301": 24, "m": 25, "z": 26, "O": 27, "\u012f": 28, "(": 29, "d": 30, "h": 31, "b": 32, "\u0105": 33, ",": 34, "P": 35, ")": 36, "o": 37, "t": 38, "T": 39, "2": 40, "\u00f3": 41, "k": 42, "E": 43, "\u02bc": 44, "S": 45, "e": 46, "\u00e1": 47, "\u0144": 48, "g": 49, "x": 50, "D": 51, "\u0119": 52, "i": 53, "1": 54, "<s>": 55, "</s>": 56, "<unk>": 57, "<pad>": 58}
 
1
+ {"w": 0, "l": 1, ",": 2, "x": 3, "\u00ed": 4, "d": 5, "\u00e9": 6, "g": 7, "\u01eb": 8, "T": 9, "N": 10, "s": 11, ")": 12, "I": 13, "j": 14, "\u0142": 15, "\u0144": 16, " ": 17, "E": 18, "\u00e1": 19, "L": 20, "D": 21, "z": 22, "M": 23, "y": 24, "k": 25, "a": 26, "1": 27, "i": 28, "P": 29, "\u012f": 30, "S": 31, "3": 32, "4": 33, "c": 34, "V": 35, "\u0105": 36, "t": 37, "b": 38, "e": 39, "\u02bc": 40, "n": 41, "O": 42, "\u0301": 43, "h": 44, "m": 45, "R": 46, "G": 47, ";": 48, "(": 49, "2": 50, "F": 51, "\u0119": 52, "\u00f3": 53, "o": 54, "<s>": 55, "</s>": 56, "<unk>": 57, "<pad>": 58}