Mango-Juice committed
Commit 568b908 · verified · 1 parent: aebe139

Upload 12 files

README.md ADDED
@@ -0,0 +1,54 @@
+ ---
+ library_name: transformers
+ license: mit
+ base_model: microsoft/deberta-v3-large
+ tags:
+ - generated_from_trainer
+ model-index:
+ - name: mlm_output
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # mlm_output
+
+ This model is a fine-tuned version of [microsoft/deberta-v3-large](https://huggingface.co/microsoft/deberta-v3-large) on an unknown dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 5e-05
+ - train_batch_size: 16
+ - eval_batch_size: 8
+ - seed: 42
+ - optimizer: AdamW (ADAMW_TORCH) with betas=(0.9, 0.999) and epsilon=1e-08; no additional optimizer arguments
+ - lr_scheduler_type: linear
+ - num_epochs: 3.0
+ - mixed_precision_training: Native AMP
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.52.0
+ - Pytorch 2.6.0+cu124
+ - Datasets 2.19.0
+ - Tokenizers 0.21.1
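Since the auto-generated card stops at framework versions, a minimal fill-mask sketch may help. It assumes the files in this commit sit in a local `./mlm_output` directory (substitute the Hub repo id once the model is published); the `[MASK]` token comes from the tokenizer files further down.

```python
# Minimal fill-mask sketch; "./mlm_output" is an assumed local path to the
# files in this commit, not a confirmed repo id.
from transformers import pipeline

fill = pipeline("fill-mask", model="./mlm_output")
for candidate in fill("The capital of France is [MASK]."):
    print(candidate["token_str"], candidate["score"])
```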
added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "[MASK]": 128000
+ }
all_results.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "epoch": 3.0,
+   "total_flos": 4.206282006594048e+16,
+   "train_loss": 3.0830205624302227,
+   "train_runtime": 6816.3036,
+   "train_samples": 163840,
+   "train_samples_per_second": 72.109,
+   "train_steps_per_second": 4.507
+ }
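The throughput figures are internally consistent, which is a quick sanity check on an upload like this: train_samples × epoch ÷ train_runtime should reproduce train_samples_per_second.

```python
# Worked check of all_results.json (values copied from the file above):
total_samples = 163840 * 3.0            # train_samples * epoch
print(total_samples / 6816.3036)        # ≈ 72.109 = train_samples_per_second
```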
config.json ADDED
@@ -0,0 +1,35 @@
+ {
+   "architectures": [
+     "DebertaV2ForMaskedLM"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 1024,
+   "initializer_range": 0.02,
+   "intermediate_size": 4096,
+   "layer_norm_eps": 1e-07,
+   "legacy": true,
+   "max_position_embeddings": 512,
+   "max_relative_positions": -1,
+   "model_type": "deberta-v2",
+   "norm_rel_ebd": "layer_norm",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 24,
+   "pad_token_id": 0,
+   "pooler_dropout": 0,
+   "pooler_hidden_act": "gelu",
+   "pooler_hidden_size": 1024,
+   "pos_att_type": [
+     "p2c",
+     "c2p"
+   ],
+   "position_biased_input": false,
+   "position_buckets": 256,
+   "relative_attention": true,
+   "share_att_key": true,
+   "torch_dtype": "float32",
+   "transformers_version": "4.52.0",
+   "type_vocab_size": 0,
+   "vocab_size": 128100
+ }
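This config alone is enough to rebuild the architecture (with random weights); the trained weights come from model.safetensors below. A sketch, again assuming the files live in a local `./mlm_output` directory:

```python
# Rebuild the DebertaV2ForMaskedLM skeleton from config.json alone.
from transformers import DebertaV2Config, DebertaV2ForMaskedLM

config = DebertaV2Config.from_pretrained("./mlm_output")   # assumed local path
assert config.num_hidden_layers == 24 and config.hidden_size == 1024
model = DebertaV2ForMaskedLM(config)                       # randomly initialized
```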
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c413b90a021dd2c1c7ec4b358b012c510e8eaca43be87a4fe4a22650db249dca
+ size 1740817016
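The size field of the LFS pointer doubles as a rough parameter count: config.json declares torch_dtype float32, i.e. 4 bytes per parameter (this sketch ignores the small JSON header inside the safetensors file, so the true count is slightly lower).

```python
# Rough parameter count from the LFS pointer size, assuming pure float32 tensors.
print(1740817016 / 4)        # ≈ 435.2M parameters in total
print(128100 * 1024)         # ≈ 131.2M of them are the vocabulary embeddings
```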
special_tokens_map.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "bos_token": "[CLS]",
+   "cls_token": "[CLS]",
+   "eos_token": "[SEP]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": {
+     "content": "[UNK]",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
spm.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c679fbf93643d19aab7ee10c0b99e460bdbc02fedf34b92b05af343b4af586fd
+ size 2464616
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,59 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "[CLS]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "[SEP]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "[UNK]",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128000": {
+       "content": "[MASK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "[CLS]",
+   "clean_up_tokenization_spaces": false,
+   "cls_token": "[CLS]",
+   "do_lower_case": false,
+   "eos_token": "[SEP]",
+   "extra_special_tokens": {},
+   "mask_token": "[MASK]",
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "sp_model_kwargs": {},
+   "split_by_punct": false,
+   "tokenizer_class": "DebertaV2Tokenizer",
+   "unk_token": "[UNK]",
+   "vocab_type": "spm"
+ }
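A small sketch of what these tokenizer files define, assuming the same local `./mlm_output` directory: ids 0–3 are [PAD]/[CLS]/[SEP]/[UNK], and [MASK] is the added token 128000 from added_tokens.json.

```python
# Load the DebertaV2Tokenizer defined by spm.model + tokenizer_config.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./mlm_output")   # assumed local path
print(tok.mask_token_id)                              # 128000
print(tok("Paris is the [MASK] of France.")["input_ids"])
```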
train_results.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "epoch": 3.0,
+   "total_flos": 4.206282006594048e+16,
+   "train_loss": 3.0830205624302227,
+   "train_runtime": 6816.3036,
+   "train_samples": 163840,
+   "train_samples_per_second": 72.109,
+   "train_steps_per_second": 4.507
+ }
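These numbers also line up with the optimizer-step count in trainer_state.json below, assuming no gradient accumulation: 163840 samples ÷ batch size 16 gives 10240 steps per epoch.

```python
# Worked check linking train_results.json to trainer_state.json:
steps_per_epoch = 163840 / 16      # train_samples / train_batch_size = 10240
print(steps_per_epoch * 3)         # 30720 = global_step in trainer_state.json
print(30720 / 6816.3036)           # ≈ 4.507 = train_steps_per_second
```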
trainer_state.json ADDED
@@ -0,0 +1,2192 @@
+ {
+   "best_global_step": null,
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 3.0,
+   "eval_steps": 500,
+   "global_step": 30720,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     { "epoch": 0.009765625, "grad_norm": NaN, "learning_rate": 4.984375e-05, "loss": 6.5469, "step": 100 },
+     { "epoch": 0.01953125, "grad_norm": 15.062654495239258, "learning_rate": 4.9682617187500003e-05, "loss": 5.3177, "step": 200 },
+     { "epoch": 0.029296875, "grad_norm": 13.20622730255127, "learning_rate": 4.9519856770833334e-05, "loss": 4.8951, "step": 300 },
+     { "epoch": 0.0390625, "grad_norm": 14.794780731201172, "learning_rate": 4.935709635416667e-05, "loss": 4.8799, "step": 400 },
+     { "epoch": 0.048828125, "grad_norm": 13.915122985839844, "learning_rate": 4.919596354166667e-05, "loss": 4.5381, "step": 500 },
+     { "epoch": 0.05859375, "grad_norm": 15.835251808166504, "learning_rate": 4.9033203125000005e-05, "loss": 4.4279, "step": 600 },
+     { "epoch": 0.068359375, "grad_norm": 14.109216690063477, "learning_rate": 4.8870442708333335e-05, "loss": 4.2612, "step": 700 },
+     { "epoch": 0.078125, "grad_norm": 13.910305976867676, "learning_rate": 4.8707682291666666e-05, "loss": 4.2912, "step": 800 },
+     { "epoch": 0.087890625, "grad_norm": 15.284989356994629, "learning_rate": 4.854654947916667e-05, "loss": 4.3003, "step": 900 },
+     { "epoch": 0.09765625, "grad_norm": 16.042747497558594, "learning_rate": 4.83837890625e-05, "loss": 4.0609, "step": 1000 },
+     { "epoch": 0.107421875, "grad_norm": 12.1378755569458, "learning_rate": 4.8221028645833336e-05, "loss": 4.1666, "step": 1100 },
+     { "epoch": 0.1171875, "grad_norm": 13.27160930633545, "learning_rate": 4.805826822916667e-05, "loss": 4.1419, "step": 1200 },
+     { "epoch": 0.126953125, "grad_norm": 16.022241592407227, "learning_rate": 4.78955078125e-05, "loss": 4.1586, "step": 1300 },
+     { "epoch": 0.13671875, "grad_norm": 17.19588851928711, "learning_rate": 4.7732747395833336e-05, "loss": 3.9012, "step": 1400 },
+     { "epoch": 0.146484375, "grad_norm": 13.319019317626953, "learning_rate": 4.756998697916667e-05, "loss": 4.017, "step": 1500 },
+     { "epoch": 0.15625, "grad_norm": 18.409399032592773, "learning_rate": 4.74072265625e-05, "loss": 3.9484, "step": 1600 },
+     { "epoch": 0.166015625, "grad_norm": 13.017057418823242, "learning_rate": 4.7244466145833336e-05, "loss": 3.9405, "step": 1700 },
+     { "epoch": 0.17578125, "grad_norm": 20.20134162902832, "learning_rate": 4.7081705729166667e-05, "loss": 3.8163, "step": 1800 },
+     { "epoch": 0.185546875, "grad_norm": 12.69709587097168, "learning_rate": 4.6918945312500004e-05, "loss": 3.8031, "step": 1900 },
+     { "epoch": 0.1953125, "grad_norm": 15.624823570251465, "learning_rate": 4.6756184895833335e-05, "loss": 3.862, "step": 2000 },
+     { "epoch": 0.205078125, "grad_norm": 18.287826538085938, "learning_rate": 4.659342447916667e-05, "loss": 3.8113, "step": 2100 },
+     { "epoch": 0.21484375, "grad_norm": 13.958403587341309, "learning_rate": 4.6430664062500004e-05, "loss": 3.7175, "step": 2200 },
+     { "epoch": 0.224609375, "grad_norm": 14.171894073486328, "learning_rate": 4.6267903645833335e-05, "loss": 3.8023, "step": 2300 },
+     { "epoch": 0.234375, "grad_norm": 13.721501350402832, "learning_rate": 4.610514322916667e-05, "loss": 3.7811, "step": 2400 },
+     { "epoch": 0.244140625, "grad_norm": 15.861538887023926, "learning_rate": 4.5942382812500003e-05, "loss": 3.6891, "step": 2500 },
+     { "epoch": 0.25390625, "grad_norm": 12.93659496307373, "learning_rate": 4.577962239583334e-05, "loss": 3.6613, "step": 2600 },
+     { "epoch": 0.263671875, "grad_norm": 14.38327407836914, "learning_rate": 4.561686197916667e-05, "loss": 4.0671, "step": 2700 },
+     { "epoch": 0.2734375, "grad_norm": 21.85599136352539, "learning_rate": 4.54541015625e-05, "loss": 3.6713, "step": 2800 },
+     { "epoch": 0.283203125, "grad_norm": 16.765880584716797, "learning_rate": 4.529134114583334e-05, "loss": 3.787, "step": 2900 },
+     { "epoch": 0.29296875, "grad_norm": 12.277482032775879, "learning_rate": 4.512858072916667e-05, "loss": 3.634, "step": 3000 },
+     { "epoch": 0.302734375, "grad_norm": 14.076525688171387, "learning_rate": 4.49658203125e-05, "loss": 3.6676, "step": 3100 },
+     { "epoch": 0.3125, "grad_norm": 54.203609466552734, "learning_rate": 4.480305989583334e-05, "loss": 3.4946, "step": 3200 },
+     { "epoch": 0.322265625, "grad_norm": 17.947622299194336, "learning_rate": 4.464029947916667e-05, "loss": 3.6398, "step": 3300 },
+     { "epoch": 0.33203125, "grad_norm": 12.46594524383545, "learning_rate": 4.44775390625e-05, "loss": 3.6395, "step": 3400 },
+     { "epoch": 0.341796875, "grad_norm": 14.41441822052002, "learning_rate": 4.431477864583334e-05, "loss": 3.5954, "step": 3500 },
+     { "epoch": 0.3515625, "grad_norm": 11.247319221496582, "learning_rate": 4.415201822916667e-05, "loss": 3.7537, "step": 3600 },
+     { "epoch": 0.361328125, "grad_norm": 15.799861907958984, "learning_rate": 4.39892578125e-05, "loss": 3.7104, "step": 3700 },
+     { "epoch": 0.37109375, "grad_norm": 13.855748176574707, "learning_rate": 4.382649739583334e-05, "loss": 3.6212, "step": 3800 },
+     { "epoch": 0.380859375, "grad_norm": 14.83035659790039, "learning_rate": 4.366373697916667e-05, "loss": 3.5802, "step": 3900 },
+     { "epoch": 0.390625, "grad_norm": 13.719660758972168, "learning_rate": 4.35009765625e-05, "loss": 3.4662, "step": 4000 },
+     { "epoch": 0.400390625, "grad_norm": NaN, "learning_rate": 4.333821614583334e-05, "loss": 3.5461, "step": 4100 },
+     { "epoch": 0.41015625, "grad_norm": 13.233614921569824, "learning_rate": 4.3177083333333334e-05, "loss": 3.5612, "step": 4200 },
+     { "epoch": 0.419921875, "grad_norm": 16.629112243652344, "learning_rate": 4.3014322916666665e-05, "loss": 3.4333, "step": 4300 },
+     { "epoch": 0.4296875, "grad_norm": 84.17444610595703, "learning_rate": 4.28515625e-05, "loss": 3.4819, "step": 4400 },
+     { "epoch": 0.439453125, "grad_norm": 14.212441444396973, "learning_rate": 4.268880208333333e-05, "loss": 3.4069, "step": 4500 },
+     { "epoch": 0.44921875, "grad_norm": 20.573959350585938, "learning_rate": 4.2526041666666664e-05, "loss": 3.6456, "step": 4600 },
+     { "epoch": 0.458984375, "grad_norm": 13.834342956542969, "learning_rate": 4.236328125e-05, "loss": 3.5382, "step": 4700 },
+     { "epoch": 0.46875, "grad_norm": 14.079811096191406, "learning_rate": 4.220052083333333e-05, "loss": 3.4105, "step": 4800 },
+     { "epoch": 0.478515625, "grad_norm": 12.509760856628418, "learning_rate": 4.2037760416666664e-05, "loss": 3.3461, "step": 4900 },
+     { "epoch": 0.48828125, "grad_norm": 14.564861297607422, "learning_rate": 4.1875e-05, "loss": 3.4041, "step": 5000 },
+     { "epoch": 0.498046875, "grad_norm": 14.737241744995117, "learning_rate": 4.171223958333333e-05, "loss": 3.507, "step": 5100 },
+     { "epoch": 0.5078125, "grad_norm": 15.188499450683594, "learning_rate": 4.154947916666666e-05, "loss": 3.4917, "step": 5200 },
+     { "epoch": 0.517578125, "grad_norm": 12.736299514770508, "learning_rate": 4.138671875e-05, "loss": 3.3674, "step": 5300 },
+     { "epoch": 0.52734375, "grad_norm": 13.326681137084961, "learning_rate": 4.122395833333333e-05, "loss": 3.4286, "step": 5400 },
+     { "epoch": 0.537109375, "grad_norm": 12.76373291015625, "learning_rate": 4.106119791666666e-05, "loss": 3.4353, "step": 5500 },
+     { "epoch": 0.546875, "grad_norm": 15.256406784057617, "learning_rate": 4.08984375e-05, "loss": 3.4297, "step": 5600 },
+     { "epoch": 0.556640625, "grad_norm": 11.880370140075684, "learning_rate": 4.073567708333333e-05, "loss": 3.2847, "step": 5700 },
+     { "epoch": 0.56640625, "grad_norm": 15.305662155151367, "learning_rate": 4.057291666666667e-05, "loss": 3.3476, "step": 5800 },
+     { "epoch": 0.576171875, "grad_norm": 24.128557205200195, "learning_rate": 4.041015625e-05, "loss": 3.4455, "step": 5900 },
+     { "epoch": 0.5859375, "grad_norm": 13.900318145751953, "learning_rate": 4.024739583333334e-05, "loss": 3.4846, "step": 6000 },
+     { "epoch": 0.595703125, "grad_norm": 11.240140914916992, "learning_rate": 4.008463541666667e-05, "loss": 3.4453, "step": 6100 },
+     { "epoch": 0.60546875, "grad_norm": 14.209588050842285, "learning_rate": 3.9921875e-05, "loss": 3.3685, "step": 6200 },
+     { "epoch": 0.615234375, "grad_norm": 16.545690536499023, "learning_rate": 3.975911458333334e-05, "loss": 3.3857, "step": 6300 },
+     { "epoch": 0.625, "grad_norm": 12.255586624145508, "learning_rate": 3.959635416666667e-05, "loss": 3.3588, "step": 6400 },
+     { "epoch": 0.634765625, "grad_norm": 15.788583755493164, "learning_rate": 3.9433593750000006e-05, "loss": 3.4022, "step": 6500 },
+     { "epoch": 0.64453125, "grad_norm": 11.233269691467285, "learning_rate": 3.92724609375e-05, "loss": 3.3202, "step": 6600 },
+     { "epoch": 0.654296875, "grad_norm": 16.585798263549805, "learning_rate": 3.910970052083334e-05, "loss": 3.2925, "step": 6700 },
+     { "epoch": 0.6640625, "grad_norm": 15.305195808410645, "learning_rate": 3.894694010416667e-05, "loss": 3.3162, "step": 6800 },
+     { "epoch": 0.673828125, "grad_norm": 11.933198928833008, "learning_rate": 3.87841796875e-05, "loss": 3.3133, "step": 6900 },
+     { "epoch": 0.68359375, "grad_norm": 15.607390403747559, "learning_rate": 3.862141927083334e-05, "loss": 3.3407, "step": 7000 },
+     { "epoch": 0.693359375, "grad_norm": 9.2060546875, "learning_rate": 3.845865885416667e-05, "loss": 3.3973, "step": 7100 },
+     { "epoch": 0.703125, "grad_norm": 17.788448333740234, "learning_rate": 3.82958984375e-05, "loss": 3.261, "step": 7200 },
+     { "epoch": 0.712890625, "grad_norm": 13.37771224975586, "learning_rate": 3.813313802083334e-05, "loss": 3.3159, "step": 7300 },
+     { "epoch": 0.72265625, "grad_norm": 13.427688598632812, "learning_rate": 3.797037760416667e-05, "loss": 3.2451, "step": 7400 },
+     { "epoch": 0.732421875, "grad_norm": 12.314142227172852, "learning_rate": 3.78076171875e-05, "loss": 3.2721, "step": 7500 },
+     { "epoch": 0.7421875, "grad_norm": 11.418351173400879, "learning_rate": 3.764485677083334e-05, "loss": 3.2943, "step": 7600 },
+     { "epoch": 0.751953125, "grad_norm": 16.546539306640625, "learning_rate": 3.748209635416667e-05, "loss": 3.2243, "step": 7700 },
+     { "epoch": 0.76171875, "grad_norm": 16.984668731689453, "learning_rate": 3.73193359375e-05, "loss": 3.3024, "step": 7800 },
+     { "epoch": 0.771484375, "grad_norm": 11.702521324157715, "learning_rate": 3.715657552083334e-05, "loss": 3.1967, "step": 7900 },
+     { "epoch": 0.78125, "grad_norm": 12.537822723388672, "learning_rate": 3.699381510416667e-05, "loss": 3.2535, "step": 8000 },
+     { "epoch": 0.791015625, "grad_norm": 13.640584945678711, "learning_rate": 3.68310546875e-05, "loss": 3.1677, "step": 8100 },
+     { "epoch": 0.80078125, "grad_norm": 15.423649787902832, "learning_rate": 3.6668294270833336e-05, "loss": 3.2641, "step": 8200 },
+     { "epoch": 0.810546875, "grad_norm": 12.065776824951172, "learning_rate": 3.650553385416667e-05, "loss": 3.1169, "step": 8300 },
+     { "epoch": 0.8203125, "grad_norm": 14.259243965148926, "learning_rate": 3.63427734375e-05, "loss": 3.2357, "step": 8400 },
+     { "epoch": 0.830078125, "grad_norm": 17.08042335510254, "learning_rate": 3.6180013020833336e-05, "loss": 3.3104, "step": 8500 },
+     { "epoch": 0.83984375, "grad_norm": 10.867400169372559, "learning_rate": 3.601725260416667e-05, "loss": 3.296, "step": 8600 },
+     { "epoch": 0.849609375, "grad_norm": 13.106012344360352, "learning_rate": 3.58544921875e-05, "loss": 3.2956, "step": 8700 },
+     { "epoch": 0.859375, "grad_norm": 15.86103630065918, "learning_rate": 3.5691731770833335e-05, "loss": 3.2569, "step": 8800 },
+     { "epoch": 0.869140625, "grad_norm": 11.86782169342041, "learning_rate": 3.5528971354166666e-05, "loss": 3.1775, "step": 8900 },
+     { "epoch": 0.87890625, "grad_norm": 13.925124168395996, "learning_rate": 3.53662109375e-05, "loss": 3.1556, "step": 9000 },
+     { "epoch": 0.888671875, "grad_norm": 15.298331260681152, "learning_rate": 3.5203450520833335e-05, "loss": 3.2081, "step": 9100 },
+     { "epoch": 0.8984375, "grad_norm": 12.989383697509766, "learning_rate": 3.5040690104166666e-05, "loss": 3.2758, "step": 9200 },
+     { "epoch": 0.908203125, "grad_norm": 12.797562599182129, "learning_rate": 3.48779296875e-05, "loss": 3.0895, "step": 9300 },
+     { "epoch": 0.91796875, "grad_norm": 10.773366928100586, "learning_rate": 3.4715169270833335e-05, "loss": 3.1352, "step": 9400 },
+     { "epoch": 0.927734375, "grad_norm": 13.436513900756836, "learning_rate": 3.4552408854166665e-05, "loss": 3.2934, "step": 9500 },
+     { "epoch": 0.9375, "grad_norm": 12.20578670501709, "learning_rate": 3.4389648437499996e-05, "loss": 3.22, "step": 9600 },
+     { "epoch": 0.947265625, "grad_norm": 15.03205680847168, "learning_rate": 3.4226888020833334e-05, "loss": 3.1925, "step": 9700 },
+     { "epoch": 0.95703125, "grad_norm": 15.373735427856445, "learning_rate": 3.4064127604166665e-05, "loss": 3.1056, "step": 9800 },
+     { "epoch": 0.966796875, "grad_norm": 13.795890808105469, "learning_rate": 3.39013671875e-05, "loss": 3.1484, "step": 9900 },
+     { "epoch": 0.9765625, "grad_norm": 15.731973648071289, "learning_rate": 3.3738606770833334e-05, "loss": 3.2102, "step": 10000 },
+     { "epoch": 0.986328125, "grad_norm": 14.516192436218262, "learning_rate": 3.3575846354166665e-05, "loss": 3.1795, "step": 10100 },
+     { "epoch": 0.99609375, "grad_norm": 11.511063575744629, "learning_rate": 3.34130859375e-05, "loss": 3.1479, "step": 10200 },
+     { "epoch": 1.005859375, "grad_norm": 13.947763442993164, "learning_rate": 3.325032552083333e-05, "loss": 3.1697, "step": 10300 },
+     { "epoch": 1.015625, "grad_norm": 9.85244369506836, "learning_rate": 3.308756510416667e-05, "loss": 3.1739, "step": 10400 },
+     { "epoch": 1.025390625, "grad_norm": 21.973459243774414, "learning_rate": 3.29248046875e-05, "loss": 2.9587, "step": 10500 },
+     { "epoch": 1.03515625, "grad_norm": 11.420741081237793, "learning_rate": 3.276204427083334e-05, "loss": 3.1121, "step": 10600 },
+     { "epoch": 1.044921875, "grad_norm": 10.605298042297363, "learning_rate": 3.259928385416667e-05, "loss": 3.0928, "step": 10700 },
+     { "epoch": 1.0546875, "grad_norm": 15.158004760742188, "learning_rate": 3.24365234375e-05, "loss": 3.0272, "step": 10800 },
+     { "epoch": 1.064453125, "grad_norm": 12.386876106262207, "learning_rate": 3.227376302083334e-05, "loss": 3.055, "step": 10900 },
+     { "epoch": 1.07421875, "grad_norm": 14.404585838317871, "learning_rate": 3.211100260416667e-05, "loss": 3.0203, "step": 11000 },
+     { "epoch": 1.083984375, "grad_norm": 14.187972068786621, "learning_rate": 3.19482421875e-05, "loss": 3.1315, "step": 11100 },
+     { "epoch": 1.09375, "grad_norm": 11.506830215454102, "learning_rate": 3.178548177083334e-05, "loss": 3.0323, "step": 11200 },
+     { "epoch": 1.103515625, "grad_norm": 14.504237174987793, "learning_rate": 3.162272135416667e-05, "loss": 2.9987, "step": 11300 },
+     { "epoch": 1.11328125, "grad_norm": 12.95081615447998, "learning_rate": 3.14599609375e-05, "loss": 2.9773, "step": 11400 },
+     { "epoch": 1.123046875, "grad_norm": 12.512577056884766, "learning_rate": 3.129720052083334e-05, "loss": 3.1454, "step": 11500 },
+     { "epoch": 1.1328125, "grad_norm": 11.951395988464355, "learning_rate": 3.113444010416667e-05, "loss": 2.9314, "step": 11600 },
+     { "epoch": 1.142578125, "grad_norm": 16.119001388549805, "learning_rate": 3.09716796875e-05, "loss": 2.9137, "step": 11700 },
+     { "epoch": 1.15234375, "grad_norm": 10.542379379272461, "learning_rate": 3.080891927083334e-05, "loss": 3.1176, "step": 11800 },
+     { "epoch": 1.162109375, "grad_norm": 14.544652938842773, "learning_rate": 3.064615885416667e-05, "loss": 2.9937, "step": 11900 },
+     { "epoch": 1.171875, "grad_norm": 11.01784896850586, "learning_rate": 3.04833984375e-05, "loss": 3.0301, "step": 12000 },
+     { "epoch": 1.181640625, "grad_norm": 12.359283447265625, "learning_rate": 3.0320638020833337e-05, "loss": 2.9277, "step": 12100 },
+     { "epoch": 1.19140625, "grad_norm": 18.171897888183594, "learning_rate": 3.015787760416667e-05, "loss": 3.0673, "step": 12200 },
+     { "epoch": 1.201171875, "grad_norm": 12.454042434692383, "learning_rate": 2.99951171875e-05, "loss": 2.9398, "step": 12300 },
+     { "epoch": 1.2109375, "grad_norm": 12.611340522766113, "learning_rate": 2.9832356770833337e-05, "loss": 2.926, "step": 12400 },
+     { "epoch": 1.220703125, "grad_norm": 12.207980155944824, "learning_rate": 2.9669596354166668e-05, "loss": 2.9713, "step": 12500 },
+     { "epoch": 1.23046875, "grad_norm": 11.924321174621582, "learning_rate": 2.95068359375e-05, "loss": 3.0734, "step": 12600 },
+     { "epoch": 1.240234375, "grad_norm": 10.981707572937012, "learning_rate": 2.9344075520833337e-05, "loss": 2.983, "step": 12700 },
+     { "epoch": 1.25, "grad_norm": 9.699291229248047, "learning_rate": 2.9181315104166667e-05, "loss": 3.0235, "step": 12800 },
+     { "epoch": 1.259765625, "grad_norm": 11.410511016845703, "learning_rate": 2.90185546875e-05, "loss": 3.081, "step": 12900 },
+     { "epoch": 1.26953125, "grad_norm": 20.204944610595703, "learning_rate": 2.8855794270833336e-05, "loss": 2.958, "step": 13000 },
+     { "epoch": 1.279296875, "grad_norm": 9.388766288757324, "learning_rate": 2.8693033854166667e-05, "loss": 3.0007, "step": 13100 },
+     { "epoch": 1.2890625, "grad_norm": 11.05562973022461, "learning_rate": 2.8530273437499998e-05, "loss": 3.0186, "step": 13200 },
+     { "epoch": 1.298828125, "grad_norm": 12.052275657653809, "learning_rate": 2.8367513020833336e-05, "loss": 2.9891, "step": 13300 },
+     { "epoch": 1.30859375, "grad_norm": 17.93643569946289, "learning_rate": 2.8204752604166667e-05, "loss": 2.9863, "step": 13400 },
+     { "epoch": 1.318359375, "grad_norm": 16.745187759399414, "learning_rate": 2.8041992187499998e-05, "loss": 2.963, "step": 13500 },
+     { "epoch": 1.328125, "grad_norm": 11.541468620300293, "learning_rate": 2.7879231770833335e-05, "loss": 3.0955, "step": 13600 },
+     { "epoch": 1.337890625, "grad_norm": 12.31360912322998, "learning_rate": 2.7716471354166666e-05, "loss": 3.0445, "step": 13700 },
+     { "epoch": 1.34765625, "grad_norm": 12.349579811096191, "learning_rate": 2.75537109375e-05, "loss": 2.9165, "step": 13800 },
+     { "epoch": 1.357421875, "grad_norm": 11.030657768249512, "learning_rate": 2.7390950520833335e-05, "loss": 2.9315, "step": 13900 },
+     { "epoch": 1.3671875, "grad_norm": 14.333861351013184, "learning_rate": 2.722819010416667e-05, "loss": 3.0059, "step": 14000 },
+     { "epoch": 1.376953125, "grad_norm": 15.17336368560791, "learning_rate": 2.70654296875e-05, "loss": 3.0364, "step": 14100 },
+     { "epoch": 1.38671875, "grad_norm": 14.133316040039062, "learning_rate": 2.6902669270833338e-05, "loss": 3.0579, "step": 14200 },
+     { "epoch": 1.396484375, "grad_norm": 11.148407936096191, "learning_rate": 2.673990885416667e-05, "loss": 2.975, "step": 14300 },
+     { "epoch": 1.40625, "grad_norm": 14.075923919677734, "learning_rate": 2.65771484375e-05, "loss": 3.0044, "step": 14400 },
+     { "epoch": 1.416015625, "grad_norm": 14.998821258544922, "learning_rate": 2.6414388020833337e-05, "loss": 2.9768, "step": 14500 },
+     { "epoch": 1.42578125, "grad_norm": 12.305022239685059, "learning_rate": 2.6251627604166668e-05, "loss": 2.9711, "step": 14600 },
+     { "epoch": 1.435546875, "grad_norm": 16.09569549560547, "learning_rate": 2.609049479166667e-05, "loss": 2.8913, "step": 14700 },
+     { "epoch": 1.4453125, "grad_norm": 10.328545570373535, "learning_rate": 2.5927734375e-05, "loss": 2.9569, "step": 14800 },
+     { "epoch": 1.455078125, "grad_norm": 10.365655899047852, "learning_rate": 2.576497395833333e-05, "loss": 2.9558, "step": 14900 },
+     { "epoch": 1.46484375, "grad_norm": 11.824304580688477, "learning_rate": 2.560221354166667e-05, "loss": 2.906, "step": 15000 },
+     { "epoch": 1.474609375, "grad_norm": 13.518112182617188, "learning_rate": 2.5439453125e-05, "loss": 2.9135, "step": 15100 },
+     { "epoch": 1.484375, "grad_norm": 13.241069793701172, "learning_rate": 2.527669270833333e-05, "loss": 2.8683, "step": 15200 },
+     { "epoch": 1.494140625, "grad_norm": 10.397425651550293, "learning_rate": 2.5115559895833336e-05, "loss": 2.9151, "step": 15300 },
+     { "epoch": 1.50390625, "grad_norm": 9.971900939941406, "learning_rate": 2.495279947916667e-05, "loss": 2.8387, "step": 15400 },
+     { "epoch": 1.513671875, "grad_norm": 12.148921966552734, "learning_rate": 2.47900390625e-05, "loss": 3.0045, "step": 15500 },
+     { "epoch": 1.5234375, "grad_norm": 12.122273445129395, "learning_rate": 2.4627278645833336e-05, "loss": 2.8201, "step": 15600 },
+     { "epoch": 1.533203125, "grad_norm": 10.754613876342773, "learning_rate": 2.446451822916667e-05, "loss": 3.0412, "step": 15700 },
+     { "epoch": 1.54296875, "grad_norm": 12.5385103225708, "learning_rate": 2.43017578125e-05, "loss": 2.9698, "step": 15800 },
+     { "epoch": 1.552734375, "grad_norm": 12.530341148376465, "learning_rate": 2.4138997395833335e-05, "loss": 2.9844, "step": 15900 },
+     { "epoch": 1.5625, "grad_norm": 12.995160102844238, "learning_rate": 2.397623697916667e-05, "loss": 2.9505, "step": 16000 },
+     { "epoch": 1.572265625, "grad_norm": 11.06143856048584, "learning_rate": 2.38134765625e-05, "loss": 2.9299, "step": 16100 },
+     { "epoch": 1.58203125, "grad_norm": 11.600089073181152, "learning_rate": 2.3650716145833335e-05, "loss": 2.9184, "step": 16200 },
+     { "epoch": 1.591796875, "grad_norm": 9.294841766357422, "learning_rate": 2.348795572916667e-05, "loss": 3.0312, "step": 16300 },
+     { "epoch": 1.6015625, "grad_norm": 13.555610656738281, "learning_rate": 2.33251953125e-05, "loss": 2.899, "step": 16400 },
+     { "epoch": 1.611328125, "grad_norm": 11.077611923217773, "learning_rate": 2.3162434895833334e-05, "loss": 2.9032, "step": 16500 },
+     { "epoch": 1.62109375, "grad_norm": 12.092378616333008, "learning_rate": 2.299967447916667e-05, "loss": 3.0475, "step": 16600 },
+     { "epoch": 1.630859375, "grad_norm": 18.95319175720215, "learning_rate": 2.28369140625e-05, "loss": 2.8485, "step": 16700 },
+     { "epoch": 1.640625, "grad_norm": 20.765520095825195, "learning_rate": 2.2674153645833334e-05, "loss": 3.0211, "step": 16800 },
+     { "epoch": 1.650390625, "grad_norm": 19.739500045776367, "learning_rate": 2.2511393229166668e-05, "loss": 3.0398, "step": 16900 },
+     { "epoch": 1.66015625, "grad_norm": 12.64313793182373, "learning_rate": 2.23486328125e-05, "loss": 2.8028, "step": 17000 },
+     { "epoch": 1.669921875, "grad_norm": 19.063640594482422, "learning_rate": 2.2185872395833333e-05, "loss": 2.896, "step": 17100 },
+     { "epoch": 1.6796875, "grad_norm": 23.34886932373047, "learning_rate": 2.2023111979166668e-05, "loss": 2.9162, "step": 17200 },
+     { "epoch": 1.689453125, "grad_norm": 11.44904899597168, "learning_rate": 2.1860351562500002e-05, "loss": 2.9423, "step": 17300 },
+     { "epoch": 1.69921875, "grad_norm": 16.1793155670166, "learning_rate": 2.1697591145833336e-05, "loss": 2.917, "step": 17400 },
+     { "epoch": 1.708984375, "grad_norm": 9.111202239990234, "learning_rate": 2.1534830729166667e-05, "loss": 2.9147, "step": 17500 },
+     { "epoch": 1.71875, "grad_norm": 12.297972679138184, "learning_rate": 2.13720703125e-05, "loss": 2.8541, "step": 17600 },
+     { "epoch": 1.728515625, "grad_norm": 14.833362579345703, "learning_rate": 2.1209309895833336e-05, "loss": 2.9548, "step": 17700 },
+     { "epoch": 1.73828125, "grad_norm": 11.36043643951416, "learning_rate": 2.1048177083333334e-05, "loss": 2.8732, "step": 17800 },
+     { "epoch": 1.748046875, "grad_norm": 12.69233512878418, "learning_rate": 2.088541666666667e-05, "loss": 2.9447, "step": 17900 },
+     { "epoch": 1.7578125, "grad_norm": 13.44200611114502, "learning_rate": 2.072265625e-05, "loss": 2.8824, "step": 18000 },
+     { "epoch": 1.767578125, "grad_norm": 10.753628730773926, "learning_rate": 2.05615234375e-05, "loss": 2.8455, "step": 18100 },
+     { "epoch": 1.77734375, "grad_norm": 13.760424613952637, "learning_rate": 2.0398763020833335e-05, "loss": 2.9853, "step": 18200 },
+     { "epoch": 1.787109375, "grad_norm": 16.471168518066406, "learning_rate": 2.023600260416667e-05, "loss": 2.8303, "step": 18300 },
+     { "epoch": 1.796875, "grad_norm": 13.708788871765137, "learning_rate": 2.0073242187500004e-05, "loss": 2.9086, "step": 18400 },
+     { "epoch": 1.806640625, "grad_norm": 13.522102355957031, "learning_rate": 1.9912109375000002e-05, "loss": 2.9567, "step": 18500 },
+     { "epoch": 1.81640625, "grad_norm": 14.42663288116455, "learning_rate": 1.9749348958333333e-05, "loss": 2.8666, "step": 18600 },
+     { "epoch": 1.826171875, "grad_norm": 10.03260326385498, "learning_rate": 1.9586588541666667e-05, "loss": 2.9193, "step": 18700 },
+     { "epoch": 1.8359375, "grad_norm": 10.757763862609863, "learning_rate": 1.9423828125e-05, "loss": 3.0071, "step": 18800 },
+     { "epoch": 1.845703125, "grad_norm": 12.414703369140625, "learning_rate": 1.9261067708333332e-05, "loss": 2.9446, "step": 18900 },
+     { "epoch": 1.85546875, "grad_norm": 12.182251930236816, "learning_rate": 1.9098307291666667e-05, "loss": 2.9584, "step": 19000 },
+     { "epoch": 1.865234375, "grad_norm": 13.588275909423828, "learning_rate": 1.8935546875e-05, "loss": 2.8303, "step": 19100 },
+     { "epoch": 1.875, "grad_norm": 11.538961410522461, "learning_rate": 1.8772786458333332e-05, "loss": 2.9129, "step": 19200 },
+     { "epoch": 1.884765625, "grad_norm": 14.856468200683594, "learning_rate": 1.8610026041666666e-05, "loss": 2.8229, "step": 19300 },
+     { "epoch": 1.89453125, "grad_norm": 12.910444259643555, "learning_rate": 1.8447265625e-05, "loss": 2.9851, "step": 19400 },
+     { "epoch": 1.904296875, "grad_norm": 12.187970161437988, "learning_rate": 1.828450520833333e-05, "loss": 2.863, "step": 19500 },
+     { "epoch": 1.9140625, "grad_norm": 16.951251983642578, "learning_rate": 1.8121744791666666e-05, "loss": 2.8726, "step": 19600 },
+     { "epoch": 1.923828125, "grad_norm": 10.626237869262695, "learning_rate": 1.7958984375e-05, "loss": 2.79, "step": 19700 },
+     { "epoch": 1.93359375, "grad_norm": 13.63193416595459, "learning_rate": 1.7796223958333334e-05, "loss": 2.8807, "step": 19800 },
+     { "epoch": 1.943359375, "grad_norm": 12.877738952636719, "learning_rate": 1.763346354166667e-05, "loss": 2.7945, "step": 19900 },
+     { "epoch": 1.953125, "grad_norm": 12.08793830871582, "learning_rate": 1.7470703125000003e-05, "loss": 2.8932, "step": 20000 },
+     { "epoch": 1.962890625, "grad_norm": 12.529877662658691, "learning_rate": 1.7307942708333334e-05, "loss": 2.9178, "step": 20100 },
+     { "epoch": 1.97265625, "grad_norm": 12.949920654296875, "learning_rate": 1.7145182291666668e-05, "loss": 2.8207, "step": 20200 },
+     { "epoch": 1.982421875, "grad_norm": 16.327682495117188, "learning_rate": 1.6982421875000003e-05, "loss": 2.9098, "step": 20300 },
+     { "epoch": 1.9921875, "grad_norm": 13.496146202087402, "learning_rate": 1.6819661458333334e-05, "loss": 2.7364, "step": 20400 },
+     { "epoch": 2.001953125, "grad_norm": 18.41584014892578, "learning_rate": 1.6656901041666668e-05, "loss": 2.8233, "step": 20500 },
+     { "epoch": 2.01171875, "grad_norm": 14.442363739013672, "learning_rate": 1.6494140625000002e-05, "loss": 2.7373, "step": 20600 },
+     { "epoch": 2.021484375, "grad_norm": 13.7149019241333, "learning_rate": 1.6331380208333333e-05, "loss": 2.7693, "step": 20700 },
+     { "epoch": 2.03125, "grad_norm": 17.81890296936035, "learning_rate": 1.6168619791666667e-05, "loss": 2.7528, "step": 20800 },
+     { "epoch": 2.041015625, "grad_norm": 11.57737922668457, "learning_rate": 1.6007486979166666e-05, "loss": 2.7167, "step": 20900 },
+     { "epoch": 2.05078125, "grad_norm": 12.967355728149414, "learning_rate": 1.58447265625e-05, "loss": 2.7034, "step": 21000 },
+     { "epoch": 2.060546875, "grad_norm": 10.983343124389648, "learning_rate": 1.568359375e-05, "loss": 2.7766, "step": 21100 },
+     { "epoch": 2.0703125, "grad_norm": 16.468509674072266, "learning_rate": 1.5520833333333336e-05, "loss": 2.7992, "step": 21200 },
+     { "epoch": 2.080078125, "grad_norm": 16.79848861694336, "learning_rate": 1.535807291666667e-05, "loss": 2.7826, "step": 21300 },
+     { "epoch": 2.08984375, "grad_norm": 16.537952423095703, "learning_rate": 1.5195312500000001e-05, "loss": 2.7119, "step": 21400 },
+     { "epoch": 2.099609375, "grad_norm": 11.28368854522705, "learning_rate": 1.5032552083333335e-05, "loss": 2.7093, "step": 21500 },
+     { "epoch": 2.109375, "grad_norm": 58.54060745239258, "learning_rate": 1.4869791666666668e-05, "loss": 2.7219, "step": 21600 },
+     { "epoch": 2.119140625, "grad_norm": 12.917949676513672, "learning_rate": 1.470703125e-05, "loss": 2.8239, "step": 21700 },
+     { "epoch": 2.12890625, "grad_norm": 18.68291664123535, "learning_rate": 1.4544270833333335e-05, "loss": 2.7574, "step": 21800 },
+     { "epoch": 2.138671875, "grad_norm": 11.076837539672852, "learning_rate": 1.4381510416666669e-05, "loss": 2.8418, "step": 21900 },
+     { "epoch": 2.1484375, "grad_norm": 16.057594299316406, "learning_rate": 1.421875e-05, "loss": 2.822, "step": 22000 },
+     { "epoch": 2.158203125, "grad_norm": 17.087045669555664, "learning_rate": 1.4055989583333334e-05, "loss": 2.7588, "step": 22100 },
+     { "epoch": 2.16796875, "grad_norm": 11.648773193359375, "learning_rate": 1.3893229166666669e-05, "loss": 2.717, "step": 22200 },
+     { "epoch": 2.177734375, "grad_norm": 12.739274024963379, "learning_rate": 1.373046875e-05, "loss": 2.6956, "step": 22300 },
+     { "epoch": 2.1875, "grad_norm": 10.17639446258545, "learning_rate": 1.3567708333333334e-05, "loss": 2.8241, "step": 22400 },
+     { "epoch": 2.197265625, "grad_norm": 13.572341918945312, "learning_rate": 1.3404947916666668e-05, "loss": 2.7858, "step": 22500 },
+     { "epoch": 2.20703125, "grad_norm": 14.310699462890625, "learning_rate": 1.3242187500000001e-05, "loss": 2.6909, "step": 22600 },
+     { "epoch": 2.216796875, "grad_norm": 11.991633415222168, "learning_rate": 1.3079427083333335e-05, "loss": 2.7708, "step": 22700 },
+     { "epoch": 2.2265625, "grad_norm": 14.214717864990234, "learning_rate": 1.2916666666666668e-05, "loss": 2.7603, "step": 22800 },
+     { "epoch": 2.236328125, "grad_norm": 16.019987106323242, "learning_rate": 1.275390625e-05, "loss": 2.7493, "step": 22900 },
+     { "epoch": 2.24609375, "grad_norm": 11.817761421203613, "learning_rate": 1.2591145833333335e-05, "loss": 2.7647, "step": 23000 },
+     { "epoch": 2.255859375, "grad_norm": 16.247276306152344, "learning_rate": 1.2430013020833335e-05, "loss": 2.7605, "step": 23100 },
+     { "epoch": 2.265625, "grad_norm": 11.79404067993164, "learning_rate": 1.2267252604166667e-05, "loss": 2.7058, "step": 23200 },
+     { "epoch": 2.275390625, "grad_norm": 12.77724838256836, "learning_rate": 1.2104492187500001e-05, "loss": 2.7446, "step": 23300 },
+     { "epoch": 2.28515625, "grad_norm": 11.609589576721191, "learning_rate": 1.1941731770833334e-05, "loss": 2.7794, "step": 23400 },
+     { "epoch": 2.294921875, "grad_norm": 13.240425109863281, "learning_rate": 1.1780598958333334e-05, "loss": 2.8056, "step": 23500 },
+     { "epoch": 2.3046875, "grad_norm": 15.682677268981934, "learning_rate": 1.1617838541666668e-05, "loss": 2.7102, "step": 23600 },
+     { "epoch": 2.314453125, "grad_norm": 15.334482192993164, "learning_rate": 1.1455078125e-05, "loss": 2.8026, "step": 23700 },
+     { "epoch": 2.32421875, "grad_norm": 11.944014549255371, "learning_rate": 1.1292317708333335e-05, "loss": 2.7106, "step": 23800 },
+     { "epoch": 2.333984375, "grad_norm": 13.437361717224121, "learning_rate": 1.1129557291666668e-05, "loss": 2.6837, "step": 23900 },
+     { "epoch": 2.34375, "grad_norm": 15.150136947631836, "learning_rate": 1.0966796875e-05, "loss": 2.7095, "step": 24000 },
+     { "epoch": 2.353515625, "grad_norm": 13.133088111877441, "learning_rate": 1.0804036458333335e-05, "loss": 2.7609, "step": 24100 },
+     { "epoch": 2.36328125, "grad_norm": 12.005653381347656, "learning_rate": 1.0641276041666667e-05, "loss": 2.7086, "step": 24200 },
+     { "epoch": 2.373046875, "grad_norm": 20.258712768554688, "learning_rate": 1.0478515625e-05, "loss": 2.8082, "step": 24300 },
+     { "epoch": 2.3828125, "grad_norm": 14.602194786071777, "learning_rate": 1.0315755208333334e-05, "loss": 2.6246, "step": 24400 },
+     { "epoch": 2.392578125, "grad_norm": 13.00714111328125, "learning_rate": 1.0152994791666667e-05, "loss": 2.8249, "step": 24500 },
+     { "epoch": 2.40234375, "grad_norm": 11.645508766174316, "learning_rate": 9.990234375e-06, "loss": 2.7555, "step": 24600 },
+     { "epoch": 2.412109375, "grad_norm": 17.61017417907715, "learning_rate": 9.827473958333334e-06, "loss": 2.6942, "step": 24700 },
+     { "epoch": 2.421875, "grad_norm": 12.34157943725586, "learning_rate": 9.664713541666668e-06, "loss": 2.7215, "step": 24800 },
+     { "epoch": 2.431640625, "grad_norm": 12.765501976013184, "learning_rate": 9.501953125e-06, "loss": 2.5747, "step": 24900 },
+     { "epoch": 2.44140625, "grad_norm": 12.85317611694336, "learning_rate": 9.339192708333335e-06, "loss": 2.7917, "step": 25000 },
+     { "epoch": 2.451171875, "grad_norm": 12.610406875610352, "learning_rate": 9.176432291666668e-06, "loss": 2.8635, "step": 25100 },
+     { "epoch": 2.4609375, "grad_norm": 12.031999588012695, "learning_rate": 9.013671875e-06, "loss": 2.6288, "step": 25200 },
+     { "epoch": 2.470703125, "grad_norm": 10.63759994506836, "learning_rate": 8.850911458333335e-06, "loss": 2.7396, "step": 25300 },
+     { "epoch": 2.48046875, "grad_norm": 18.768993377685547, "learning_rate": 8.688151041666667e-06, "loss": 2.6733, "step": 25400 },
+     { "epoch": 2.490234375, "grad_norm": 9.411011695861816, "learning_rate": 8.525390625e-06, "loss": 2.7355, "step": 25500 },
+     { "epoch": 2.5, "grad_norm": 11.31039810180664, "learning_rate": 8.362630208333334e-06, "loss": 2.6648, "step": 25600 },
+     { "epoch": 2.509765625, "grad_norm": 11.34940242767334, "learning_rate": 8.199869791666667e-06, "loss": 2.6185, "step": 25700 },
+     { "epoch": 2.51953125, "grad_norm": 13.539913177490234, "learning_rate": 8.037109375e-06, "loss": 2.6344, "step": 25800 },
+     { "epoch": 2.529296875, "grad_norm": 12.985583305358887, "learning_rate": 7.874348958333334e-06, "loss": 2.7028, "step": 25900 },
+     { "epoch": 2.5390625, "grad_norm": 23.021692276000977, "learning_rate": 7.711588541666666e-06, "loss": 2.6719, "step": 26000 },
+     { "epoch": 2.548828125, "grad_norm": 14.796003341674805, "learning_rate": 7.548828125e-06, "loss": 2.669, "step": 26100 },
+     { "epoch": 2.55859375, "grad_norm": 16.303749084472656, "learning_rate": 7.386067708333334e-06, "loss": 2.5954, "step": 26200 },
+     { "epoch": 2.568359375, "grad_norm": 10.631623268127441, "learning_rate": 7.223307291666667e-06, "loss": 2.669, "step": 26300 },
+     { "epoch": 2.578125, "grad_norm": 13.405618667602539, "learning_rate": 7.060546875e-06, "loss": 2.7102, "step": 26400 },
+     { "epoch": 2.587890625, "grad_norm": 16.89972496032715, "learning_rate": 6.897786458333335e-06, "loss": 2.7198, "step": 26500 },
+     { "epoch": 2.59765625, "grad_norm": 14.60909652709961, "learning_rate": 6.735026041666667e-06, "loss": 2.686, "step": 26600 },
+     { "epoch": 2.607421875, "grad_norm": 16.859573364257812, "learning_rate": 6.572265625e-06, "loss": 2.7073, "step": 26700 },
+     { "epoch": 2.6171875, "grad_norm": 12.876221656799316, "learning_rate": 6.409505208333334e-06, "loss": 2.66, "step": 26800 },
+     { "epoch": 2.626953125, "grad_norm": 12.116389274597168, "learning_rate": 6.246744791666667e-06, "loss": 2.6773, "step": 26900 },
+     { "epoch": 2.63671875, "grad_norm": 13.397444725036621, "learning_rate": 6.083984375e-06, "loss": 2.5832, "step": 27000 },
+     { "epoch": 2.646484375, "grad_norm": 14.27937126159668, "learning_rate": 5.921223958333334e-06, "loss": 2.7137, "step": 27100 },
+     { "epoch": 2.65625, "grad_norm": 12.069489479064941, "learning_rate": 5.758463541666667e-06, "loss": 2.6435, "step": 27200 },
+     { "epoch": 2.666015625, "grad_norm": 11.179854393005371, "learning_rate": 5.595703125e-06, "loss": 2.707, "step": 27300 },
+     { "epoch": 2.67578125, "grad_norm": 11.071802139282227, "learning_rate": 5.432942708333333e-06, "loss": 2.6166, "step": 27400 },
+     { "epoch": 2.685546875, "grad_norm": 14.306278228759766, "learning_rate": 5.270182291666667e-06, "loss": 2.6747, "step": 27500 },
+     { "epoch": 2.6953125, "grad_norm": 14.3062744140625, "learning_rate": 5.107421875e-06, "loss": 2.5382, "step": 27600 },
+     { "epoch": 2.705078125, "grad_norm": 14.975716590881348, "learning_rate": 4.944661458333334e-06, "loss": 2.7215, "step": 27700 },
+     { "epoch": 2.71484375, "grad_norm": 14.584077835083008, "learning_rate": 4.781901041666667e-06, "loss": 2.7351, "step": 27800 },
+     { "epoch": 2.724609375, "grad_norm": 11.181657791137695, "learning_rate": 4.619140625e-06, "loss": 2.576, "step": 27900 },
+     { "epoch": 2.734375, "grad_norm": 10.974489212036133, "learning_rate": 4.456380208333333e-06, "loss": 2.7631, "step": 28000 },
+     { "epoch": 2.744140625, "grad_norm": 15.731523513793945, "learning_rate": 4.295247395833334e-06, "loss": 2.5512, "step": 28100 },
+     { "epoch": 2.75390625, "grad_norm": 12.23338794708252, "learning_rate": 4.132486979166667e-06, "loss": 2.6233, "step": 28200 },
+     { "epoch": 2.763671875, "grad_norm": 9.420981407165527, "learning_rate": 3.9697265625e-06, "loss": 2.6464, "step": 28300 },
+     { "epoch": 2.7734375, "grad_norm": 10.947881698608398, "learning_rate": 3.8069661458333335e-06, "loss": 2.6197, "step": 28400 },
+     { "epoch": 2.783203125, "grad_norm": 17.761030197143555, "learning_rate": 3.644205729166667e-06, "loss": 2.6341, "step": 28500 },
+     { "epoch": 2.79296875, "grad_norm": 15.768482208251953, "learning_rate": 3.4814453125e-06, "loss": 2.653, "step": 28600 },
+     { "epoch": 2.802734375, "grad_norm": 13.388958930969238, "learning_rate": 3.3186848958333335e-06, "loss": 2.5511, "step": 28700 },
+     { "epoch": 2.8125, "grad_norm": 11.864120483398438, "learning_rate": 3.155924479166667e-06, "loss": 2.7781, "step": 28800 },
+     { "epoch": 2.822265625, "grad_norm": 11.44416618347168, "learning_rate": 2.9931640625e-06, "loss": 2.6667, "step": 28900 },
+     { "epoch": 2.83203125, "grad_norm": 13.012479782104492, "learning_rate": 2.8304036458333335e-06,
2039
+ "loss": 2.6217,
2040
+ "step": 29000
2041
+ },
2042
+ {
2043
+ "epoch": 2.841796875,
2044
+ "grad_norm": 14.73644733428955,
2045
+ "learning_rate": 2.667643229166667e-06,
2046
+ "loss": 2.606,
2047
+ "step": 29100
2048
+ },
2049
+ {
2050
+ "epoch": 2.8515625,
2051
+ "grad_norm": 12.075024604797363,
2052
+ "learning_rate": 2.5048828125e-06,
2053
+ "loss": 2.5863,
2054
+ "step": 29200
2055
+ },
2056
+ {
2057
+ "epoch": 2.861328125,
2058
+ "grad_norm": 14.664973258972168,
2059
+ "learning_rate": 2.3421223958333335e-06,
2060
+ "loss": 2.7423,
2061
+ "step": 29300
2062
+ },
2063
+ {
2064
+ "epoch": 2.87109375,
2065
+ "grad_norm": 14.096378326416016,
2066
+ "learning_rate": 2.179361979166667e-06,
2067
+ "loss": 2.7122,
2068
+ "step": 29400
2069
+ },
2070
+ {
2071
+ "epoch": 2.880859375,
2072
+ "grad_norm": 12.812738418579102,
2073
+ "learning_rate": 2.0166015625e-06,
2074
+ "loss": 2.7648,
2075
+ "step": 29500
2076
+ },
2077
+ {
2078
+ "epoch": 2.890625,
2079
+ "grad_norm": 16.189590454101562,
2080
+ "learning_rate": 1.8538411458333335e-06,
2081
+ "loss": 2.669,
2082
+ "step": 29600
2083
+ },
2084
+ {
2085
+ "epoch": 2.900390625,
2086
+ "grad_norm": 14.232686996459961,
2087
+ "learning_rate": 1.6910807291666667e-06,
2088
+ "loss": 2.6242,
2089
+ "step": 29700
2090
+ },
2091
+ {
2092
+ "epoch": 2.91015625,
2093
+ "grad_norm": 16.499113082885742,
2094
+ "learning_rate": 1.5283203125000002e-06,
2095
+ "loss": 2.7261,
2096
+ "step": 29800
2097
+ },
2098
+ {
2099
+ "epoch": 2.919921875,
2100
+ "grad_norm": 10.421613693237305,
2101
+ "learning_rate": 1.3655598958333332e-06,
2102
+ "loss": 2.5805,
2103
+ "step": 29900
2104
+ },
2105
+ {
2106
+ "epoch": 2.9296875,
2107
+ "grad_norm": 11.867895126342773,
2108
+ "learning_rate": 1.2027994791666667e-06,
2109
+ "loss": 2.6225,
2110
+ "step": 30000
2111
+ },
2112
+ {
2113
+ "epoch": 2.939453125,
2114
+ "grad_norm": 14.396392822265625,
2115
+ "learning_rate": 1.0400390625000002e-06,
2116
+ "loss": 2.591,
2117
+ "step": 30100
2118
+ },
2119
+ {
2120
+ "epoch": 2.94921875,
2121
+ "grad_norm": 26.637563705444336,
2122
+ "learning_rate": 8.772786458333333e-07,
2123
+ "loss": 2.6324,
2124
+ "step": 30200
2125
+ },
2126
+ {
2127
+ "epoch": 2.958984375,
2128
+ "grad_norm": 13.43743896484375,
2129
+ "learning_rate": 7.145182291666667e-07,
2130
+ "loss": 2.7171,
2131
+ "step": 30300
2132
+ },
2133
+ {
2134
+ "epoch": 2.96875,
2135
+ "grad_norm": 13.423970222473145,
2136
+ "learning_rate": 5.517578125000001e-07,
2137
+ "loss": 2.5509,
2138
+ "step": 30400
2139
+ },
2140
+ {
2141
+ "epoch": 2.978515625,
2142
+ "grad_norm": 12.26883316040039,
2143
+ "learning_rate": 3.889973958333334e-07,
2144
+ "loss": 2.6303,
2145
+ "step": 30500
2146
+ },
2147
+ {
2148
+ "epoch": 2.98828125,
2149
+ "grad_norm": 13.305875778198242,
2150
+ "learning_rate": 2.262369791666667e-07,
2151
+ "loss": 2.6542,
2152
+ "step": 30600
2153
+ },
2154
+ {
2155
+ "epoch": 2.998046875,
2156
+ "grad_norm": 18.230594635009766,
2157
+ "learning_rate": 6.347656250000001e-08,
2158
+ "loss": 2.6088,
2159
+ "step": 30700
2160
+ },
2161
+ {
2162
+ "epoch": 3.0,
2163
+ "step": 30720,
2164
+ "total_flos": 4.206282006594048e+16,
2165
+ "train_loss": 3.0830205624302227,
2166
+ "train_runtime": 6816.3036,
2167
+ "train_samples_per_second": 72.109,
2168
+ "train_steps_per_second": 4.507
2169
+ }
2170
+ ],
2171
+ "logging_steps": 100,
2172
+ "max_steps": 30720,
2173
+ "num_input_tokens_seen": 0,
2174
+ "num_train_epochs": 3,
2175
+ "save_steps": 1000,
2176
+ "stateful_callbacks": {
2177
+ "TrainerControl": {
2178
+ "args": {
2179
+ "should_epoch_stop": false,
2180
+ "should_evaluate": false,
2181
+ "should_log": false,
2182
+ "should_save": true,
2183
+ "should_training_stop": true
2184
+ },
2185
+ "attributes": {}
2186
+ }
2187
+ },
2188
+ "total_flos": 4.206282006594048e+16,
2189
+ "train_batch_size": 16,
2190
+ "trial_name": null,
2191
+ "trial_params": null
2192
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:65446e104857d9065085080ad419ad131c6bbd56fe4e6736d3aa6873e107fd71
+ size 5304