MarkGG committed
Commit 9a77cc1 · 1 Parent(s): 0cd427e

End of training
added_tokens.json CHANGED
@@ -1,104 +1,106 @@
  {
- "[EX0]": 41063,
- "[EX10]": 41073,
- "[EX11]": 41074,
- "[EX12]": 41075,
- "[EX13]": 41076,
- "[EX14]": 41077,
- "[EX15]": 41078,
- "[EX16]": 41079,
- "[EX17]": 41080,
- "[EX18]": 41081,
- "[EX19]": 41082,
- "[EX1]": 41064,
- "[EX20]": 41083,
- "[EX21]": 41084,
- "[EX22]": 41085,
- "[EX23]": 41086,
- "[EX24]": 41087,
- "[EX25]": 41088,
- "[EX26]": 41089,
- "[EX27]": 41090,
- "[EX28]": 41091,
- "[EX29]": 41092,
- "[EX2]": 41065,
- "[EX30]": 41093,
- "[EX31]": 41094,
- "[EX32]": 41095,
- "[EX33]": 41096,
- "[EX34]": 41097,
- "[EX35]": 41098,
- "[EX36]": 41099,
- "[EX37]": 41100,
- "[EX38]": 41101,
- "[EX39]": 41102,
- "[EX3]": 41066,
- "[EX40]": 41103,
- "[EX41]": 41104,
- "[EX42]": 41105,
- "[EX43]": 41106,
- "[EX44]": 41107,
- "[EX45]": 41108,
- "[EX46]": 41109,
- "[EX47]": 41110,
- "[EX48]": 41111,
- "[EX49]": 41112,
- "[EX4]": 41067,
- "[EX50]": 41113,
- "[EX51]": 41114,
- "[EX52]": 41115,
- "[EX53]": 41116,
- "[EX54]": 41117,
- "[EX55]": 41118,
- "[EX56]": 41119,
- "[EX57]": 41120,
- "[EX58]": 41121,
- "[EX59]": 41122,
- "[EX5]": 41068,
- "[EX60]": 41123,
- "[EX61]": 41124,
- "[EX62]": 41125,
- "[EX63]": 41126,
- "[EX64]": 41127,
- "[EX65]": 41128,
- "[EX66]": 41129,
- "[EX67]": 41130,
- "[EX68]": 41131,
- "[EX69]": 41132,
- "[EX6]": 41069,
- "[EX70]": 41133,
- "[EX71]": 41134,
- "[EX72]": 41135,
- "[EX73]": 41136,
- "[EX74]": 41137,
- "[EX75]": 41138,
- "[EX76]": 41139,
- "[EX77]": 41140,
- "[EX78]": 41141,
- "[EX79]": 41142,
- "[EX7]": 41070,
- "[EX80]": 41143,
- "[EX81]": 41144,
- "[EX82]": 41145,
- "[EX83]": 41146,
- "[EX84]": 41147,
- "[EX85]": 41148,
- "[EX86]": 41149,
- "[EX87]": 41150,
- "[EX88]": 41151,
- "[EX89]": 41152,
- "[EX8]": 41071,
- "[EX90]": 41153,
- "[EX91]": 41154,
- "[EX92]": 41155,
- "[EX93]": 41156,
- "[EX94]": 41157,
- "[EX95]": 41158,
- "[EX96]": 41159,
- "[EX97]": 41160,
- "[EX98]": 41161,
- "[EX99]": 41162,
- "[EX9]": 41072,
- "[FRL]": 41061,
- "[MRL]": 41062
+ "[ex0]": 31972,
+ "[ex10]": 31982,
+ "[ex11]": 31983,
+ "[ex12]": 31984,
+ "[ex13]": 31985,
+ "[ex14]": 31986,
+ "[ex15]": 31987,
+ "[ex16]": 31988,
+ "[ex17]": 31989,
+ "[ex18]": 31990,
+ "[ex19]": 31991,
+ "[ex1]": 31973,
+ "[ex20]": 31992,
+ "[ex21]": 31993,
+ "[ex22]": 31994,
+ "[ex23]": 31995,
+ "[ex24]": 31996,
+ "[ex25]": 31997,
+ "[ex26]": 31998,
+ "[ex27]": 31999,
+ "[ex28]": 32000,
+ "[ex29]": 32001,
+ "[ex2]": 31974,
+ "[ex30]": 32002,
+ "[ex31]": 32003,
+ "[ex32]": 32004,
+ "[ex33]": 32005,
+ "[ex34]": 32006,
+ "[ex35]": 32007,
+ "[ex36]": 32008,
+ "[ex37]": 32009,
+ "[ex38]": 32010,
+ "[ex39]": 32011,
+ "[ex3]": 31975,
+ "[ex40]": 32012,
+ "[ex41]": 32013,
+ "[ex42]": 32014,
+ "[ex43]": 32015,
+ "[ex44]": 32016,
+ "[ex45]": 32017,
+ "[ex46]": 32018,
+ "[ex47]": 32019,
+ "[ex48]": 32020,
+ "[ex49]": 32021,
+ "[ex4]": 31976,
+ "[ex50]": 32022,
+ "[ex51]": 32023,
+ "[ex52]": 32024,
+ "[ex53]": 32025,
+ "[ex54]": 32026,
+ "[ex55]": 32027,
+ "[ex56]": 32028,
+ "[ex57]": 32029,
+ "[ex58]": 32030,
+ "[ex59]": 32031,
+ "[ex5]": 31977,
+ "[ex60]": 32032,
+ "[ex61]": 32033,
+ "[ex62]": 32034,
+ "[ex63]": 32035,
+ "[ex64]": 32036,
+ "[ex65]": 32037,
+ "[ex66]": 32038,
+ "[ex67]": 32039,
+ "[ex68]": 32040,
+ "[ex69]": 32041,
+ "[ex6]": 31978,
+ "[ex70]": 32042,
+ "[ex71]": 32043,
+ "[ex72]": 32044,
+ "[ex73]": 32045,
+ "[ex74]": 32046,
+ "[ex75]": 32047,
+ "[ex76]": 32048,
+ "[ex77]": 32049,
+ "[ex78]": 32050,
+ "[ex79]": 32051,
+ "[ex7]": 31979,
+ "[ex80]": 32052,
+ "[ex81]": 32053,
+ "[ex82]": 32054,
+ "[ex83]": 32055,
+ "[ex84]": 32056,
+ "[ex85]": 32057,
+ "[ex86]": 32058,
+ "[ex87]": 32059,
+ "[ex88]": 32060,
+ "[ex89]": 32061,
+ "[ex8]": 31980,
+ "[ex90]": 32062,
+ "[ex91]": 32063,
+ "[ex92]": 32064,
+ "[ex93]": 32065,
+ "[ex94]": 32066,
+ "[ex95]": 32067,
+ "[ex96]": 32068,
+ "[ex97]": 32069,
+ "[ex98]": 32070,
+ "[ex99]": 32071,
+ "[ex9]": 31981,
+ "[frl]": 31970,
+ "[mrl]": 31971,
+ "ext.": 32072,
+ "int.": 32073
  }
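The new mapping registers 104 custom markers ([frl], [mrl], [ex0] through [ex99], plus the strings "ext." and "int.") starting at ID 31970, which implies a base vocabulary of 31,970 tokens, down from the 41,061 implied by the old block. A minimal sketch of how a file like this is typically produced; the checkpoint path is a placeholder and the exact call pattern is an assumption, not the author's training script:

```python
from transformers import AutoTokenizer

# Placeholder path: not the author's actual base checkpoint.
tokenizer = AutoTokenizer.from_pretrained("path/to/base-tokenizer")

# Marker list taken from the new side of this diff.
markers = ["[frl]", "[mrl]"] + [f"[ex{i}]" for i in range(100)] + ["ext.", "int."]
tokenizer.add_tokens(markers)         # new tokens are appended after the base vocabulary
tokenizer.save_pretrained("output/")  # writes the token-to-ID map into added_tokens.json
```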
config.json CHANGED
@@ -35,5 +35,5 @@
  "torch_dtype": "float32",
  "transformers_version": "4.23.1",
  "use_cache": true,
- "vocab_size": 41163
+ "vocab_size": 32074
  }
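The vocab_size drops from 41163 to 32074, which matches the new tokenizer exactly (31,970 base tokens plus the 104 added markers above). A hedged sketch of how this value is usually kept in sync with the tokenizer after adding tokens; the paths are placeholders:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder paths, not the author's actual checkpoints.
tokenizer = AutoTokenizer.from_pretrained("path/to/tokenizer-with-added-tokens")
model = AutoModelForCausalLM.from_pretrained("path/to/base-model")

# Resizing the embedding matrix also updates config.vocab_size,
# so the saved config.json matches len(tokenizer) (32074 in this commit).
model.resize_token_embeddings(len(tokenizer))
assert model.config.vocab_size == len(tokenizer)
```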
merges.txt CHANGED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:9e193f0ce3bd689a33f19cde4fd1d2697d9fe7cb5a88428701397c1dceb29f4e
- size 482459753
+ oid sha256:b4838407241c8bb8b972332dcf06218c3837f43458f9f78cc1a5650d244729cf
+ size 454538345
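The checkpoint shrinks by 27,921,408 bytes, which lines up with the smaller token-embedding matrix. A quick sanity check, assuming GPT-2 small's hidden size of 768 (not stated in this diff) and the float32 weights declared in config.json, with the tied input/output embedding stored once:

```python
# Byte and vocab counts are taken from this diff; the hidden size of 768
# is an assumption (standard GPT-2 small).
removed_rows = 41163 - 32074                 # 9089 fewer vocabulary entries
delta_bytes = removed_rows * 768 * 4         # 27,921,408 bytes of float32 weights
print(482459753 - 454538345 == delta_bytes)  # True
```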
runs/Oct29_06-07-34_7de124e32abe/1667023834.3569448/events.out.tfevents.1667023834.7de124e32abe.77.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7f014d58a702d9fef71c228fc92b6ace415fe21fc2cba7979908c9caa80856b
+ size 5472
runs/Oct29_06-07-34_7de124e32abe/events.out.tfevents.1667023834.7de124e32abe.77.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ceaa2cba2a9b3e4e61d4ed0c1bf9b656e8c1f6cff84f6d514e350298c69e1a08
+ size 4284
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -3,7 +3,7 @@
  "bos_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
  "model_max_length": 1024,
- "name_or_path": "MarkGG/Romance-cleaned-2",
+ "name_or_path": "MarkGG/Romance-cleaned-3",
  "special_tokens_map_file": null,
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "<|endoftext|>"
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0a214dbc40bca978d40429e6d19c987f9c0679b98fd31dfd900629991bbb9e71
+ oid sha256:7c3ad90ec882ffb7e5112c711500f71d9cb5c2d37e31d64427840bbede4b0b01
  size 3375
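Only the hash changes here; training_args.bin is the pickled TrainingArguments object the Trainer saves alongside the model, and a new hash need not mean different hyperparameters, since run-specific fields such as the logging directory are serialized too. It can be inspected as sketched below (unpickling executes code, so only load files you trust); the printed attributes are standard TrainingArguments fields, not values confirmed by this diff:

```python
import torch

# Requires transformers to be installed so the TrainingArguments class can be unpickled.
args = torch.load("training_args.bin")
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```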
vocab.json CHANGED
The diff for this file is too large to render. See raw diff