smithblack-0 committed
Commit 6c19086 (verified)
1 Parent(s): 25acc1b

Upload folder using huggingface_hub

regularization-sweep-v1/conlinear_s42_wd0.0_do0.0_gdo0.1_dsTrue/checkpoint_005000/metrics.json ADDED
The diff for this file is too large to render. See raw diff
 
regularization-sweep-v1/conlinear_s42_wd0.0_do0.0_gdo0.1_dsTrue/checkpoint_005000/model.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c8b19cbe01e19627dfca417d17c7d4c6148f42a1eb730fb37ac599270def0fcb
+ size 140928199
regularization-sweep-v1/conlinear_s42_wd0.0_do0.0_gdo0.1_dsTrue/checkpoint_005000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:85c2b70e009a05a705cf8fdaabfd108a39fa562a8475190df7c1cb84c0423cea
+ size 281938993
regularization-sweep-v1/conlinear_s42_wd0.0_do0.0_gdo0.1_dsTrue/checkpoint_005000/random_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b88732d3dfc1672779e438778d8cead796ad1888ff56846c8aa96cb759e89294
+ size 13655
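The three .pt files above are stored through Git LFS, so the diff only shows the pointer files (spec version, content hash, and byte size), not the binaries themselves. Below is a minimal sketch of fetching and inspecting one of them; the repo_id is a hypothetical placeholder (the repository name is not shown in this diff), and whether model.pt holds a plain state dict or a wrapped training object is also an assumption.

import torch
from huggingface_hub import hf_hub_download

# Download the LFS-backed checkpoint file from the Hub (repo_id is hypothetical).
path = hf_hub_download(
    repo_id="smithblack-0/placeholder-repo",  # assumption: actual repo name not shown here
    filename=(
        "regularization-sweep-v1/conlinear_s42_wd0.0_do0.0_gdo0.1_dsTrue/"
        "checkpoint_005000/model.pt"
    ),
)

# Load on CPU; weights_only=False is needed if the file pickles more than tensors
# (the optimizer and RNG-state files almost certainly do).
state = torch.load(path, map_location="cpu", weights_only=False)
print(type(state))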
regularization-sweep-v1/conlinear_s42_wd0.0_do0.0_gdo0.1_dsTrue/checkpoint_005000/tokenizer/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
regularization-sweep-v1/conlinear_s42_wd0.0_do0.0_gdo0.1_dsTrue/checkpoint_005000/tokenizer/special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "bos_token": "<|endoftext|>",
+   "eos_token": "<|endoftext|>",
+   "pad_token": "<|endoftext|>",
+   "unk_token": "<|endoftext|>"
+ }
regularization-sweep-v1/conlinear_s42_wd0.0_do0.0_gdo0.1_dsTrue/checkpoint_005000/tokenizer/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
regularization-sweep-v1/conlinear_s42_wd0.0_do0.0_gdo0.1_dsTrue/checkpoint_005000/tokenizer/tokenizer_config.json ADDED
@@ -0,0 +1,21 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "50256": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|endoftext|>",
+   "extra_special_tokens": {},
+   "model_max_length": 1024,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>"
+ }
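The tokenizer saved with this checkpoint is a standard GPT-2 tokenizer: tokenizer_class is GPT2Tokenizer, model_max_length is 1024, and bos/eos/pad/unk all map to <|endoftext|> (id 50256). A minimal sketch of loading it, assuming the checkpoint folder has already been downloaded locally at the path shown (the local path is illustrative):

from transformers import AutoTokenizer

# Load the tokenizer directory committed alongside the checkpoint.
tok = AutoTokenizer.from_pretrained(
    "regularization-sweep-v1/conlinear_s42_wd0.0_do0.0_gdo0.1_dsTrue/"
    "checkpoint_005000/tokenizer"
)

# All special tokens resolve to <|endoftext|>, as in the two JSON files above.
print(tok.bos_token, tok.eos_token, tok.pad_token, tok.unk_token)
print(tok.convert_tokens_to_ids("<|endoftext|>"))  # expected: 50256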
regularization-sweep-v1/conlinear_s42_wd0.0_do0.0_gdo0.1_dsTrue/checkpoint_005000/tokenizer/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
regularization-sweep-v1/conlinear_s42_wd0.0_do0.0_gdo0.1_dsTrue/checkpoint_005000/training_state.json ADDED
@@ -0,0 +1,5 @@
+ {
+   "global_batch": 5000,
+   "effective_batch": 5000,
+   "miniepoch_num": 5
+ }
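training_state.json records where the run stood when checkpoint_005000 was written: global batch 5000, effective batch 5000, mini-epoch 5. How the original training loop consumes these counters when resuming is not shown in this commit; the sketch below simply reads them back from a local copy (path illustrative).

import json

# Read the bookkeeping counters saved with this checkpoint.
with open(
    "regularization-sweep-v1/conlinear_s42_wd0.0_do0.0_gdo0.1_dsTrue/"
    "checkpoint_005000/training_state.json"
) as f:
    training_state = json.load(f)

print(training_state["global_batch"], training_state["effective_batch"], training_state["miniepoch_num"])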