alexshengzhili committed on
Commit 2480684 · 1 Parent(s): 26e5a4f

Upload folder using huggingface_hub
added_tokens.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "<im_end>": 32003,
+ "<im_patch>": 32001,
+ "<im_start>": 32002,
+ "[PAD]": 32000
+ }
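
The four entries above are LLaVA's multimodal markers (`<im_patch>`, `<im_start>`, `<im_end>`) plus a `[PAD]` token, appended directly after the 32,000-token LLaMA vocabulary — which is why config.json below reports `"vocab_size": 32004`. A minimal sketch (assuming a local checkout of this repo) that checks the ids are contiguous with the base vocabulary:

```python
import json

with open("added_tokens.json") as f:
    added = json.load(f)

base_vocab = 32000  # LLaMA tokenizer size; inferred from the [PAD] id
assert sorted(added.values()) == list(range(base_vocab, base_vocab + len(added)))
for token, idx in sorted(added.items(), key=lambda kv: kv[1]):
    print(f"{idx}: {token}")
```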
config.json ADDED
@@ -0,0 +1,30 @@
+ {
+ "_name_or_path": "alexshengzhili/vicuna-7b-v0",
+ "architectures": [
+ "LlavaLlamaForCausalLM"
+ ],
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "mm_hidden_size": 1024,
+ "mm_use_im_start_end": true,
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "openai/clip-vit-large-patch14",
+ "model_type": "llava",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "pad_token_id": 0,
+ "rms_norm_eps": 1e-06,
+ "sep_image_conv_front": false,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float32",
+ "transformers_version": "4.28.0.dev0",
+ "tune_mm_mlp_adapter": false,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vocab_size": 32004
+ }
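
This is a standard LLaMA-7B config (32 layers, 4096 hidden, 32 heads) extended with LLaVA's `mm_*` keys: a CLIP ViT-L/14 vision tower whose second-to-last layer's 1024-dim features are projected into the 4096-dim language model. Note that `"model_type": "llava"` was not a model type known to stock transformers 4.28, so instantiating `LlavaLlamaForCausalLM` needs the LLaVA codebase (an assumption here). A sketch that sanity-checks the config without instantiating the model:

```python
import json

cfg = json.load(open("config.json"))  # local checkout assumed

assert cfg["vocab_size"] == 32000 + 4                         # base vocab + added_tokens.json
assert cfg["hidden_size"] // cfg["num_attention_heads"] == 128  # per-head dim
# The mm projector maps CLIP's 1024-dim features into the 4096-dim LM.
print(cfg["mm_vision_tower"], cfg["mm_hidden_size"], "->", cfg["hidden_size"])
```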
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "pad_token_id": 0,
+ "transformers_version": "4.28.0.dev0"
+ }
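
`"_from_model_config": true` marks this file as auto-derived from config.json; it mirrors the special-token ids so that `generate()` stops on `</s>` (id 2) and pads with id 0 without extra arguments. A short sketch (assuming transformers >= 4.26, where `GenerationConfig` exists, and a local checkout):

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained(".")  # "." is a placeholder path
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id, gen_cfg.pad_token_id)  # 1 2 0
```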
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0589f81e58a6b58144ce5670f10fdcf5ffa9471ca405d1fc1bfab8a72f53e0b7
+ size 6742674890
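
Large binaries in this commit (optimizer.pt, the model shards, rng states, scheduler, tokenizer.model below) are tracked with Git LFS, so the repo stores only this three-line pointer (spec version, sha256 oid, byte size); the ~6.7 GB optimizer state itself lives in LFS storage and is fetched by `git lfs pull` or transparently by huggingface_hub. A minimal sketch (Python 3.9+) of parsing such a pointer:

```python
def parse_lfs_pointer(text: str) -> dict:
    # Each pointer line is "key value"; split on the first space.
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "oid": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:0589f81e58a6b58144ce5670f10fdcf5ffa9471ca405d1fc1bfab8a72f53e0b7
size 6742674890"""
info = parse_lfs_pointer(pointer)
print(f"{info['size_bytes'] / 1e9:.1f} GB, sha256 {info['oid'][:12]}...")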
pytorch_model-00001-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ebec5d6826dc0fa5080558695efd48a9ee5585da37664ed2f08e7fe60db4f5ec
+ size 9878059026
pytorch_model-00002-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5432547201c8b5aa2de649ae007fa3362009d1bebe81bdf3493afd88137b7252
+ size 9894805046
pytorch_model-00003-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f4653b00a31d598a6c6df3c29b8cb052a9a17af88ef72adfebb820954cae7b65
+ size 7197853429
pytorch_model.bin.index.json ADDED
@@ -0,0 +1,332 @@
+ {
+ "metadata": {
+ "total_size": 26970595328
+ },
+ "weight_map": {
+ "lm_head.weight": "pytorch_model-00003-of-00003.bin",
+ "model.embed_tokens.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.11.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.11.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.11.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.11.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.12.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.20.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.23.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+ "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+ "model.layers.24.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.28.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.29.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.30.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.30.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+ "model.layers.31.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+ "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+ "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+ "model.mm_projector.bias": "pytorch_model-00003-of-00003.bin",
+ "model.mm_projector.weight": "pytorch_model-00003-of-00003.bin",
+ "model.norm.weight": "pytorch_model-00003-of-00003.bin"
+ }
+ }
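
The index maps every tensor name to one of the three shards; `total_size` of ~27 GB is consistent with float32 weights for a 7B-parameter LLaMA plus the 32,004-row embeddings and the mm projector. A sketch of what a loader does with a sharded checkpoint (assuming PyTorch is installed and the files are local):

```python
import json
import torch

# Open the index, then load each shard once and merge into one state dict.
index = json.load(open("pytorch_model.bin.index.json"))

state_dict = {}
for shard_file in sorted(set(index["weight_map"].values())):
    shard = torch.load(shard_file, map_location="cpu")
    state_dict.update(shard)

total = sum(t.numel() * t.element_size() for t in state_dict.values())
print(total, "bytes loaded; index says", index["metadata"]["total_size"])
```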
rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a961c7aa8293531040f379f129f1930093670f11c91d3736bf6d8518af2b1507
+ size 14583
rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dbc4475e8615a434844642d2b6db8c82a3eb4d1532c0358142650af35982f7d4
+ size 14583
rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:881eb4ea22a97cf8b6f92b147ff4c6e2ae57dcc7e36f52b570b5be3eeb7887b6
+ size 14583
rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8b494daea51340989a686bc325649dfd2d62f65af56b0d25143e33790c8a957a
+ size 14583
rng_state_4.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2a88b3a9b9d34f9c56a628c06043f3d84338d75c0f6730bf31d21d05f60a24e7
+ size 14583
rng_state_5.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e1556e939ee7eca3367a22108d1bd8653197711d2ab29b9fc7633bec21b96125
+ size 14583
rng_state_6.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86e9ed27d35f586c7ec30600373ad7abf89086510211847110cb4a979365cfa0
+ size 14583
rng_state_7.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1828f5f1b78bdfd59c65a849162815831fbce941a063f6c4056e93648685a2ec
+ size 14583
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:31a263831488bd0dc14c4e90b72806a981f938f1c27e60121054b4dc51b2c55a
+ size 627
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "[PAD]",
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+ size 499723
tokenizer_config.json ADDED
@@ -0,0 +1,32 @@
+ {
+ "bos_token": {
+ "__type": "AddedToken",
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "clean_up_tokenization_spaces": false,
+ "eos_token": {
+ "__type": "AddedToken",
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "model_max_length": 2048,
+ "pad_token": null,
+ "padding_side": "right",
+ "special_tokens_map_file": "/root/.cache/huggingface/hub/models--lmsys--vicuna-7b-delta-v1.1/snapshots/59055f912e1c5cee762a793eda99d51e80807f11/special_tokens_map.json",
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": {
+ "__type": "AddedToken",
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
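
Note that `pad_token` is null here, but special_tokens_map.json above supplies `"[PAD]"` and added_tokens.json pins it to id 32000; loading the folder stitches tokenizer.model, this config, the special-tokens map, and the added tokens into one tokenizer. A sketch (assuming a local checkout of this repo; the path is a placeholder):

```python
from transformers import LlamaTokenizer

tok = LlamaTokenizer.from_pretrained(".")
print(len(tok))                         # 32004: 32000 base + 4 added tokens
print(tok.pad_token, tok.pad_token_id)  # [PAD] 32000
print(tok.model_max_length)             # 2048
```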
trainer_state.json ADDED
@@ -0,0 +1,3016 @@
+ {
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.1111111111111112,
+ "global_step": 500,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.9268292682926833e-06,
+ "loss": 2.0292,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 5.853658536585367e-06,
+ "loss": 2.0417,
+ "step": 2
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.780487804878048e-06,
+ "loss": 1.5471,
+ "step": 3
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 1.1707317073170733e-05,
+ "loss": 1.4769,
+ "step": 4
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 1.4634146341463415e-05,
+ "loss": 1.3682,
+ "step": 5
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 1.7560975609756096e-05,
+ "loss": 1.3256,
+ "step": 6
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 2.048780487804878e-05,
+ "loss": 1.317,
+ "step": 7
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 2.3414634146341466e-05,
+ "loss": 1.3441,
+ "step": 8
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 2.6341463414634148e-05,
+ "loss": 1.3028,
+ "step": 9
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 2.926829268292683e-05,
+ "loss": 1.3212,
+ "step": 10
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 3.2195121951219514e-05,
+ "loss": 1.2535,
+ "step": 11
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 3.512195121951219e-05,
+ "loss": 1.2933,
+ "step": 12
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 3.804878048780488e-05,
+ "loss": 1.2811,
+ "step": 13
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 4.097560975609756e-05,
+ "loss": 1.304,
+ "step": 14
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 4.390243902439024e-05,
+ "loss": 1.3205,
+ "step": 15
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 4.682926829268293e-05,
+ "loss": 1.2825,
+ "step": 16
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 4.975609756097561e-05,
+ "loss": 1.2774,
+ "step": 17
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 5.2682926829268296e-05,
+ "loss": 1.2641,
+ "step": 18
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 5.560975609756098e-05,
+ "loss": 1.2429,
+ "step": 19
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 5.853658536585366e-05,
+ "loss": 1.2503,
+ "step": 20
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 6.146341463414634e-05,
+ "loss": 1.2882,
+ "step": 21
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 6.439024390243903e-05,
+ "loss": 1.3068,
+ "step": 22
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 6.731707317073171e-05,
+ "loss": 1.2592,
+ "step": 23
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 7.024390243902439e-05,
+ "loss": 1.2538,
+ "step": 24
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 7.317073170731707e-05,
+ "loss": 1.3129,
+ "step": 25
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 7.609756097560976e-05,
+ "loss": 1.2578,
+ "step": 26
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 7.902439024390244e-05,
+ "loss": 1.2534,
+ "step": 27
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 8.195121951219513e-05,
+ "loss": 1.3043,
+ "step": 28
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 8.48780487804878e-05,
+ "loss": 1.2677,
+ "step": 29
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 8.780487804878048e-05,
+ "loss": 1.2535,
+ "step": 30
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 9.073170731707318e-05,
+ "loss": 1.2639,
+ "step": 31
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 9.365853658536587e-05,
+ "loss": 1.3038,
+ "step": 32
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 9.658536585365855e-05,
+ "loss": 1.2966,
+ "step": 33
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 9.951219512195122e-05,
+ "loss": 1.2355,
+ "step": 34
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.0001024390243902439,
+ "loss": 1.2861,
+ "step": 35
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00010536585365853659,
+ "loss": 1.3257,
+ "step": 36
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00010829268292682928,
+ "loss": 1.3195,
+ "step": 37
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00011121951219512196,
+ "loss": 1.3274,
+ "step": 38
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00011414634146341463,
+ "loss": 1.2388,
+ "step": 39
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00011707317073170732,
+ "loss": 1.2686,
+ "step": 40
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00012,
+ "loss": 1.274,
+ "step": 41
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00011999982720089112,
+ "loss": 1.2754,
+ "step": 42
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00011999930880455974,
+ "loss": 1.2609,
+ "step": 43
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00011999844481399185,
+ "loss": 1.3151,
+ "step": 44
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00011999723523416397,
+ "loss": 1.3002,
+ "step": 45
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00011999568007204328,
+ "loss": 1.2993,
+ "step": 46
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00011999377933658745,
+ "loss": 1.301,
+ "step": 47
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00011999153303874466,
+ "loss": 1.3308,
+ "step": 48
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00011998894119145353,
+ "loss": 1.2533,
+ "step": 49
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00011998600380964302,
+ "loss": 1.2932,
+ "step": 50
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00011998272091023235,
+ "loss": 1.2577,
+ "step": 51
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00011997909251213094,
+ "loss": 1.3185,
+ "step": 52
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00011997511863623823,
+ "loss": 1.2698,
+ "step": 53
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00011997079930544366,
+ "loss": 1.2681,
+ "step": 54
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00011996613454462643,
+ "loss": 1.2688,
+ "step": 55
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001199611243806554,
+ "loss": 1.2947,
+ "step": 56
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.000119955768842389,
+ "loss": 1.3007,
+ "step": 57
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00011995006796067497,
+ "loss": 1.2609,
+ "step": 58
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00011994402176835021,
+ "loss": 1.3019,
+ "step": 59
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.0001199376303002406,
+ "loss": 1.3291,
+ "step": 60
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00011993089359316082,
+ "loss": 1.271,
+ "step": 61
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00011992381168591412,
+ "loss": 1.3238,
+ "step": 62
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00011991638461929203,
+ "loss": 1.325,
+ "step": 63
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00011990861243607424,
+ "loss": 1.249,
+ "step": 64
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00011990049518102833,
+ "loss": 1.2804,
+ "step": 65
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00011989203290090944,
+ "loss": 1.2872,
+ "step": 66
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00011988322564446003,
+ "loss": 1.3123,
+ "step": 67
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00011987407346240964,
+ "loss": 1.3108,
+ "step": 68
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00011986457640747457,
+ "loss": 1.3069,
+ "step": 69
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00011985473453435758,
+ "loss": 1.2944,
+ "step": 70
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00011984454789974758,
+ "loss": 1.2525,
+ "step": 71
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00011983401656231926,
+ "loss": 1.2582,
+ "step": 72
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.0001198231405827328,
+ "loss": 1.3009,
+ "step": 73
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00011981192002363357,
+ "loss": 1.3136,
+ "step": 74
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00011980035494965159,
+ "loss": 1.2795,
+ "step": 75
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.0001197884454274014,
+ "loss": 1.3302,
+ "step": 76
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00011977619152548147,
+ "loss": 1.3062,
+ "step": 77
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.0001197635933144739,
+ "loss": 1.2965,
+ "step": 78
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00011975065086694404,
+ "loss": 1.27,
+ "step": 79
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00011973736425743998,
+ "loss": 1.2587,
+ "step": 80
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.0001197237335624922,
+ "loss": 1.3065,
+ "step": 81
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00011970975886061309,
+ "loss": 1.2612,
+ "step": 82
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00011969544023229654,
+ "loss": 1.317,
+ "step": 83
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00011968077776001742,
+ "loss": 1.3093,
+ "step": 84
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00011966577152823111,
+ "loss": 1.2632,
+ "step": 85
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00011965042162337308,
+ "loss": 1.2846,
+ "step": 86
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00011963472813385833,
+ "loss": 1.3133,
+ "step": 87
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00011961869115008088,
+ "loss": 1.3272,
+ "step": 88
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.0001196023107644133,
+ "loss": 1.2728,
+ "step": 89
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.0001195855870712061,
+ "loss": 1.2478,
+ "step": 90
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00011956852016678727,
+ "loss": 1.2854,
+ "step": 91
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00011955111014946166,
+ "loss": 1.2712,
+ "step": 92
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00011953335711951047,
+ "loss": 1.2605,
+ "step": 93
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00011951526117919063,
+ "loss": 1.3021,
+ "step": 94
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00011949682243273419,
+ "loss": 1.2482,
+ "step": 95
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.0001194780409863478,
+ "loss": 1.2801,
+ "step": 96
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00011945891694821206,
+ "loss": 1.2746,
+ "step": 97
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.0001194394504284808,
+ "loss": 1.28,
+ "step": 98
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00011941964153928065,
+ "loss": 1.2296,
+ "step": 99
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00011939949039471018,
+ "loss": 1.2531,
+ "step": 100
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00011937899711083942,
+ "loss": 1.2641,
+ "step": 101
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00011935816180570905,
+ "loss": 1.3043,
+ "step": 102
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00011933698459932983,
+ "loss": 1.2511,
+ "step": 103
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00011931546561368184,
+ "loss": 1.2838,
+ "step": 104
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00011929360497271377,
+ "loss": 1.2616,
+ "step": 105
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.0001192714028023423,
+ "loss": 1.3006,
+ "step": 106
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00011924885923045124,
+ "loss": 1.2762,
+ "step": 107
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00011922597438689093,
+ "loss": 1.2797,
+ "step": 108
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00011920274840347734,
+ "loss": 1.2526,
+ "step": 109
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00011917918141399149,
+ "loss": 1.2742,
+ "step": 110
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.0001191552735541785,
+ "loss": 1.2698,
+ "step": 111
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00011913102496174698,
+ "loss": 1.2516,
+ "step": 112
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00011910643577636807,
+ "loss": 1.2814,
+ "step": 113
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00011908150613967473,
+ "loss": 1.3506,
+ "step": 114
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00011905623619526097,
+ "loss": 1.3044,
+ "step": 115
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.0001190306260886809,
+ "loss": 1.2427,
+ "step": 116
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00011900467596744797,
+ "loss": 1.3192,
+ "step": 117
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00011897838598103412,
+ "loss": 1.2457,
+ "step": 118
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00011895175628086887,
+ "loss": 1.2686,
+ "step": 119
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.0001189247870203385,
+ "loss": 1.2782,
+ "step": 120
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00011889747835478518,
+ "loss": 1.3098,
+ "step": 121
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00011886983044150598,
+ "loss": 1.266,
+ "step": 122
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00011884184343975209,
+ "loss": 1.2918,
+ "step": 123
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00011881351751072778,
+ "loss": 1.2611,
+ "step": 124
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00011878485281758958,
+ "loss": 1.2192,
+ "step": 125
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00011875584952544527,
+ "loss": 1.3018,
+ "step": 126
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00011872650780135294,
+ "loss": 1.304,
+ "step": 127
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00011869682781432005,
+ "loss": 1.3303,
+ "step": 128
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00011866680973530246,
+ "loss": 1.2869,
+ "step": 129
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00011863645373720338,
+ "loss": 1.2533,
+ "step": 130
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00011860575999487249,
+ "loss": 1.2678,
+ "step": 131
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00011857472868510483,
+ "loss": 1.2895,
+ "step": 132
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001185433599866398,
+ "loss": 1.3199,
+ "step": 133
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001185116540801602,
+ "loss": 1.264,
+ "step": 134
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00011847961114829109,
+ "loss": 1.2979,
+ "step": 135
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001184472313755988,
+ "loss": 1.2764,
+ "step": 136
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001184145149485899,
+ "loss": 1.286,
+ "step": 137
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00011838146205571,
+ "loss": 1.2782,
+ "step": 138
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00011834807288734277,
+ "loss": 1.2893,
+ "step": 139
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00011831434763580886,
+ "loss": 1.2874,
+ "step": 140
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001182802864953647,
+ "loss": 1.3005,
+ "step": 141
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00011824588966220147,
+ "loss": 1.2885,
+ "step": 142
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00011821115733444388,
+ "loss": 1.291,
+ "step": 143
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00011817608971214912,
+ "loss": 1.2475,
+ "step": 144
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00011814068699730562,
+ "loss": 1.2787,
+ "step": 145
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00011810494939383203,
+ "loss": 1.2816,
+ "step": 146
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00011806887710757583,
+ "loss": 1.3126,
+ "step": 147
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00011803247034631235,
+ "loss": 1.3111,
+ "step": 148
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00011799572931974343,
+ "loss": 1.2751,
+ "step": 149
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.0001179586542394963,
+ "loss": 1.232,
+ "step": 150
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00011792124531912233,
+ "loss": 1.2673,
+ "step": 151
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00011788350277409578,
+ "loss": 1.2299,
+ "step": 152
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00011784542682181257,
+ "loss": 1.2662,
+ "step": 153
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.0001178070176815891,
+ "loss": 1.2464,
+ "step": 154
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00011776827557466086,
+ "loss": 1.2414,
+ "step": 155
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00011772920072418121,
+ "loss": 1.254,
+ "step": 156
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00011768979335522015,
+ "loss": 1.2713,
+ "step": 157
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00011765005369476294,
+ "loss": 1.2481,
+ "step": 158
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00011760998197170885,
+ "loss": 1.2539,
+ "step": 159
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00011756957841686985,
+ "loss": 1.2687,
+ "step": 160
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00011752884326296917,
+ "loss": 1.2749,
+ "step": 161
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00011748777674464008,
+ "loss": 1.2518,
+ "step": 162
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00011744637909842455,
+ "loss": 1.3132,
+ "step": 163
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00011740465056277176,
+ "loss": 1.3026,
+ "step": 164
+ },
+ {
+ "epoch": 0.37,
996
+ "learning_rate": 0.00011736259137803685,
997
+ "loss": 1.2225,
998
+ "step": 165
999
+ },
1000
+ {
1001
+ "epoch": 0.37,
1002
+ "learning_rate": 0.00011732020178647945,
1003
+ "loss": 1.2805,
1004
+ "step": 166
1005
+ },
1006
+ {
1007
+ "epoch": 0.37,
1008
+ "learning_rate": 0.0001172774820322624,
1009
+ "loss": 1.2332,
1010
+ "step": 167
1011
+ },
1012
+ {
1013
+ "epoch": 0.37,
1014
+ "learning_rate": 0.00011723443236145015,
1015
+ "loss": 1.2702,
1016
+ "step": 168
1017
+ },
1018
+ {
1019
+ "epoch": 0.38,
1020
+ "learning_rate": 0.00011719105302200757,
1021
+ "loss": 1.2665,
1022
+ "step": 169
1023
+ },
1024
+ {
1025
+ "epoch": 0.38,
1026
+ "learning_rate": 0.00011714734426379837,
1027
+ "loss": 1.2261,
1028
+ "step": 170
1029
+ },
1030
+ {
1031
+ "epoch": 0.38,
1032
+ "learning_rate": 0.00011710330633858367,
1033
+ "loss": 1.2395,
1034
+ "step": 171
1035
+ },
1036
+ {
1037
+ "epoch": 0.38,
1038
+ "learning_rate": 0.00011705893950002063,
1039
+ "loss": 1.2588,
1040
+ "step": 172
1041
+ },
1042
+ {
1043
+ "epoch": 0.38,
1044
+ "learning_rate": 0.0001170142440036609,
1045
+ "loss": 1.2609,
1046
+ "step": 173
1047
+ },
1048
+ {
1049
+ "epoch": 0.39,
1050
+ "learning_rate": 0.00011696922010694925,
1051
+ "loss": 1.2454,
1052
+ "step": 174
1053
+ },
1054
+ {
1055
+ "epoch": 0.39,
1056
+ "learning_rate": 0.00011692386806922196,
1057
+ "loss": 1.2901,
1058
+ "step": 175
1059
+ },
1060
+ {
1061
+ "epoch": 0.39,
1062
+ "learning_rate": 0.00011687818815170541,
1063
+ "loss": 1.2764,
1064
+ "step": 176
1065
+ },
1066
+ {
1067
+ "epoch": 0.39,
1068
+ "learning_rate": 0.0001168321806175146,
1069
+ "loss": 1.2632,
1070
+ "step": 177
1071
+ },
1072
+ {
1073
+ "epoch": 0.4,
1074
+ "learning_rate": 0.00011678584573165155,
1075
+ "loss": 1.2594,
1076
+ "step": 178
1077
+ },
1078
+ {
1079
+ "epoch": 0.4,
1080
+ "learning_rate": 0.0001167391837610038,
1081
+ "loss": 1.2609,
1082
+ "step": 179
1083
+ },
1084
+ {
1085
+ "epoch": 0.4,
1086
+ "learning_rate": 0.00011669219497434297,
1087
+ "loss": 1.2928,
1088
+ "step": 180
1089
+ },
1090
+ {
1091
+ "epoch": 0.4,
1092
+ "learning_rate": 0.00011664487964232302,
1093
+ "loss": 1.2612,
1094
+ "step": 181
1095
+ },
1096
+ {
1097
+ "epoch": 0.4,
1098
+ "learning_rate": 0.00011659723803747888,
1099
+ "loss": 1.3024,
1100
+ "step": 182
1101
+ },
1102
+ {
1103
+ "epoch": 0.41,
1104
+ "learning_rate": 0.00011654927043422479,
1105
+ "loss": 1.2663,
1106
+ "step": 183
1107
+ },
1108
+ {
1109
+ "epoch": 0.41,
1110
+ "learning_rate": 0.0001165009771088527,
1111
+ "loss": 1.2345,
1112
+ "step": 184
1113
+ },
1114
+ {
1115
+ "epoch": 0.41,
1116
+ "learning_rate": 0.00011645235833953074,
1117
+ "loss": 1.3054,
1118
+ "step": 185
1119
+ },
1120
+ {
1121
+ "epoch": 0.41,
1122
+ "learning_rate": 0.00011640341440630155,
1123
+ "loss": 1.2414,
1124
+ "step": 186
1125
+ },
1126
+ {
1127
+ "epoch": 0.42,
1128
+ "learning_rate": 0.00011635414559108078,
1129
+ "loss": 1.304,
1130
+ "step": 187
1131
+ },
1132
+ {
1133
+ "epoch": 0.42,
1134
+ "learning_rate": 0.00011630455217765531,
1135
+ "loss": 1.2611,
1136
+ "step": 188
1137
+ },
1138
+ {
1139
+ "epoch": 0.42,
1140
+ "learning_rate": 0.00011625463445168175,
1141
+ "loss": 1.2403,
1142
+ "step": 189
1143
+ },
1144
+ {
1145
+ "epoch": 0.42,
1146
+ "learning_rate": 0.00011620439270068469,
1147
+ "loss": 1.2718,
1148
+ "step": 190
1149
+ },
1150
+ {
1151
+ "epoch": 0.42,
1152
+ "learning_rate": 0.00011615382721405513,
1153
+ "loss": 1.2647,
1154
+ "step": 191
1155
+ },
1156
+ {
1157
+ "epoch": 0.43,
1158
+ "learning_rate": 0.0001161029382830488,
1159
+ "loss": 1.2481,
1160
+ "step": 192
1161
+ },
1162
+ {
1163
+ "epoch": 0.43,
1164
+ "learning_rate": 0.00011605172620078439,
1165
+ "loss": 1.25,
1166
+ "step": 193
1167
+ },
1168
+ {
1169
+ "epoch": 0.43,
1170
+ "learning_rate": 0.000116000191262242,
1171
+ "loss": 1.2761,
1172
+ "step": 194
1173
+ },
1174
+ {
1175
+ "epoch": 0.43,
1176
+ "learning_rate": 0.00011594833376426134,
1177
+ "loss": 1.3131,
1178
+ "step": 195
1179
+ },
1180
+ {
1181
+ "epoch": 0.44,
1182
+ "learning_rate": 0.00011589615400554007,
1183
+ "loss": 1.222,
1184
+ "step": 196
1185
+ },
1186
+ {
1187
+ "epoch": 0.44,
1188
+ "learning_rate": 0.00011584365228663202,
1189
+ "loss": 1.2354,
1190
+ "step": 197
1191
+ },
1192
+ {
1193
+ "epoch": 0.44,
1194
+ "learning_rate": 0.00011579082890994557,
1195
+ "loss": 1.2224,
1196
+ "step": 198
1197
+ },
1198
+ {
1199
+ "epoch": 0.44,
1200
+ "learning_rate": 0.00011573768417974176,
1201
+ "loss": 1.2783,
1202
+ "step": 199
1203
+ },
1204
+ {
1205
+ "epoch": 0.44,
1206
+ "learning_rate": 0.00011568421840213267,
1207
+ "loss": 1.2999,
1208
+ "step": 200
1209
+ },
1210
+ {
1211
+ "epoch": 0.45,
1212
+ "learning_rate": 0.00011563043188507961,
1213
+ "loss": 1.2273,
1214
+ "step": 201
1215
+ },
1216
+ {
1217
+ "epoch": 0.45,
1218
+ "learning_rate": 0.0001155763249383913,
1219
+ "loss": 1.2849,
1220
+ "step": 202
1221
+ },
1222
+ {
1223
+ "epoch": 0.45,
1224
+ "learning_rate": 0.00011552189787372217,
1225
+ "loss": 1.2166,
1226
+ "step": 203
1227
+ },
1228
+ {
1229
+ "epoch": 0.45,
1230
+ "learning_rate": 0.00011546715100457046,
1231
+ "loss": 1.2472,
1232
+ "step": 204
1233
+ },
1234
+ {
1235
+ "epoch": 0.46,
1236
+ "learning_rate": 0.00011541208464627652,
1237
+ "loss": 1.2523,
1238
+ "step": 205
1239
+ },
1240
+ {
1241
+ "epoch": 0.46,
1242
+ "learning_rate": 0.00011535669911602097,
1243
+ "loss": 1.2413,
1244
+ "step": 206
1245
+ },
1246
+ {
1247
+ "epoch": 0.46,
1248
+ "learning_rate": 0.00011530099473282279,
1249
+ "loss": 1.2395,
1250
+ "step": 207
1251
+ },
1252
+ {
1253
+ "epoch": 0.46,
1254
+ "learning_rate": 0.00011524497181753759,
1255
+ "loss": 1.2717,
1256
+ "step": 208
1257
+ },
1258
+ {
1259
+ "epoch": 0.46,
1260
+ "learning_rate": 0.00011518863069285567,
1261
+ "loss": 1.2152,
1262
+ "step": 209
1263
+ },
1264
+ {
1265
+ "epoch": 0.47,
1266
+ "learning_rate": 0.00011513197168330026,
1267
+ "loss": 1.2583,
1268
+ "step": 210
1269
+ },
1270
+ {
1271
+ "epoch": 0.47,
1272
+ "learning_rate": 0.00011507499511522556,
1273
+ "loss": 1.2311,
1274
+ "step": 211
1275
+ },
1276
+ {
1277
+ "epoch": 0.47,
1278
+ "learning_rate": 0.00011501770131681491,
1279
+ "loss": 1.276,
1280
+ "step": 212
1281
+ },
1282
+ {
1283
+ "epoch": 0.47,
1284
+ "learning_rate": 0.0001149600906180789,
1285
+ "loss": 1.2171,
1286
+ "step": 213
1287
+ },
1288
+ {
1289
+ "epoch": 0.48,
1290
+ "learning_rate": 0.00011490216335085345,
1291
+ "loss": 1.2381,
1292
+ "step": 214
1293
+ },
1294
+ {
1295
+ "epoch": 0.48,
1296
+ "learning_rate": 0.00011484391984879785,
1297
+ "loss": 1.2577,
1298
+ "step": 215
1299
+ },
1300
+ {
1301
+ "epoch": 0.48,
1302
+ "learning_rate": 0.000114785360447393,
1303
+ "loss": 1.2659,
1304
+ "step": 216
1305
+ },
1306
+ {
1307
+ "epoch": 0.48,
1308
+ "learning_rate": 0.00011472648548393928,
1309
+ "loss": 1.2436,
1310
+ "step": 217
1311
+ },
1312
+ {
1313
+ "epoch": 0.48,
1314
+ "learning_rate": 0.00011466729529755472,
1315
+ "loss": 1.2542,
1316
+ "step": 218
1317
+ },
1318
+ {
1319
+ "epoch": 0.49,
1320
+ "learning_rate": 0.00011460779022917307,
1321
+ "loss": 1.2525,
1322
+ "step": 219
1323
+ },
1324
+ {
1325
+ "epoch": 0.49,
1326
+ "learning_rate": 0.00011454797062154173,
1327
+ "loss": 1.2511,
1328
+ "step": 220
1329
+ },
1330
+ {
1331
+ "epoch": 0.49,
1332
+ "learning_rate": 0.00011448783681921988,
1333
+ "loss": 1.2654,
1334
+ "step": 221
1335
+ },
1336
+ {
1337
+ "epoch": 0.49,
1338
+ "learning_rate": 0.00011442738916857643,
1339
+ "loss": 1.2532,
1340
+ "step": 222
1341
+ },
1342
+ {
1343
+ "epoch": 0.5,
1344
+ "learning_rate": 0.00011436662801778805,
1345
+ "loss": 1.2714,
1346
+ "step": 223
1347
+ },
1348
+ {
1349
+ "epoch": 0.5,
1350
+ "learning_rate": 0.00011430555371683716,
1351
+ "loss": 1.2708,
1352
+ "step": 224
1353
+ },
1354
+ {
1355
+ "epoch": 0.5,
1356
+ "learning_rate": 0.00011424416661750994,
1357
+ "loss": 1.1997,
1358
+ "step": 225
1359
+ },
1360
+ {
1361
+ "epoch": 0.5,
1362
+ "learning_rate": 0.00011418246707339422,
1363
+ "loss": 1.2324,
1364
+ "step": 226
1365
+ },
1366
+ {
1367
+ "epoch": 0.5,
1368
+ "learning_rate": 0.00011412045543987757,
1369
+ "loss": 1.2788,
1370
+ "step": 227
1371
+ },
1372
+ {
1373
+ "epoch": 0.51,
1374
+ "learning_rate": 0.00011405813207414514,
1375
+ "loss": 1.2543,
1376
+ "step": 228
1377
+ },
1378
+ {
1379
+ "epoch": 0.51,
1380
+ "learning_rate": 0.0001139954973351777,
1381
+ "loss": 1.2854,
1382
+ "step": 229
1383
+ },
1384
+ {
1385
+ "epoch": 0.51,
1386
+ "learning_rate": 0.00011393255158374945,
1387
+ "loss": 1.2034,
1388
+ "step": 230
1389
+ },
1390
+ {
1391
+ "epoch": 0.51,
1392
+ "learning_rate": 0.00011386929518242606,
1393
+ "loss": 1.2724,
1394
+ "step": 231
1395
+ },
1396
+ {
1397
+ "epoch": 0.52,
1398
+ "learning_rate": 0.00011380572849556251,
1399
+ "loss": 1.2631,
1400
+ "step": 232
1401
+ },
1402
+ {
1403
+ "epoch": 0.52,
1404
+ "learning_rate": 0.00011374185188930107,
1405
+ "loss": 1.281,
1406
+ "step": 233
1407
+ },
1408
+ {
1409
+ "epoch": 0.52,
1410
+ "learning_rate": 0.00011367766573156905,
1411
+ "loss": 1.2162,
1412
+ "step": 234
1413
+ },
1414
+ {
1415
+ "epoch": 0.52,
1416
+ "learning_rate": 0.00011361317039207682,
1417
+ "loss": 1.2456,
1418
+ "step": 235
1419
+ },
1420
+ {
1421
+ "epoch": 0.52,
1422
+ "learning_rate": 0.00011354836624231564,
1423
+ "loss": 1.2486,
1424
+ "step": 236
1425
+ },
1426
+ {
1427
+ "epoch": 0.53,
1428
+ "learning_rate": 0.00011348325365555547,
1429
+ "loss": 1.2923,
1430
+ "step": 237
1431
+ },
1432
+ {
1433
+ "epoch": 0.53,
1434
+ "learning_rate": 0.00011341783300684288,
1435
+ "loss": 1.2268,
1436
+ "step": 238
1437
+ },
1438
+ {
1439
+ "epoch": 0.53,
1440
+ "learning_rate": 0.00011335210467299887,
1441
+ "loss": 1.2583,
1442
+ "step": 239
1443
+ },
1444
+ {
1445
+ "epoch": 0.53,
1446
+ "learning_rate": 0.00011328606903261669,
1447
+ "loss": 1.2731,
1448
+ "step": 240
1449
+ },
1450
+ {
1451
+ "epoch": 0.54,
1452
+ "learning_rate": 0.00011321972646605965,
1453
+ "loss": 1.2224,
1454
+ "step": 241
1455
+ },
1456
+ {
1457
+ "epoch": 0.54,
1458
+ "learning_rate": 0.00011315307735545897,
1459
+ "loss": 1.2495,
1460
+ "step": 242
1461
+ },
1462
+ {
1463
+ "epoch": 0.54,
1464
+ "learning_rate": 0.00011308612208471157,
1465
+ "loss": 1.2641,
1466
+ "step": 243
1467
+ },
1468
+ {
1469
+ "epoch": 0.54,
1470
+ "learning_rate": 0.0001130188610394778,
1471
+ "loss": 1.2308,
1472
+ "step": 244
1473
+ },
1474
+ {
1475
+ "epoch": 0.54,
1476
+ "learning_rate": 0.00011295129460717928,
1477
+ "loss": 1.2406,
1478
+ "step": 245
1479
+ },
1480
+ {
1481
+ "epoch": 0.55,
1482
+ "learning_rate": 0.00011288342317699666,
1483
+ "loss": 1.252,
1484
+ "step": 246
1485
+ },
1486
+ {
1487
+ "epoch": 0.55,
1488
+ "learning_rate": 0.00011281524713986736,
1489
+ "loss": 1.2578,
1490
+ "step": 247
1491
+ },
1492
+ {
1493
+ "epoch": 0.55,
1494
+ "learning_rate": 0.00011274676688848332,
1495
+ "loss": 1.2609,
1496
+ "step": 248
1497
+ },
1498
+ {
1499
+ "epoch": 0.55,
1500
+ "learning_rate": 0.00011267798281728878,
1501
+ "loss": 1.2398,
1502
+ "step": 249
1503
+ },
1504
+ {
1505
+ "epoch": 0.56,
1506
+ "learning_rate": 0.00011260889532247793,
1507
+ "loss": 1.2689,
1508
+ "step": 250
1509
+ },
1510
+ {
1511
+ "epoch": 0.56,
1512
+ "learning_rate": 0.00011253950480199267,
1513
+ "loss": 1.2193,
1514
+ "step": 251
1515
+ },
1516
+ {
1517
+ "epoch": 0.56,
1518
+ "learning_rate": 0.00011246981165552038,
1519
+ "loss": 1.2361,
1520
+ "step": 252
1521
+ },
1522
+ {
1523
+ "epoch": 0.56,
1524
+ "learning_rate": 0.00011239981628449148,
1525
+ "loss": 1.2999,
1526
+ "step": 253
1527
+ },
1528
+ {
1529
+ "epoch": 0.56,
1530
+ "learning_rate": 0.00011232951909207721,
1531
+ "loss": 1.2357,
1532
+ "step": 254
1533
+ },
1534
+ {
1535
+ "epoch": 0.57,
1536
+ "learning_rate": 0.00011225892048318737,
1537
+ "loss": 1.2478,
1538
+ "step": 255
1539
+ },
1540
+ {
1541
+ "epoch": 0.57,
1542
+ "learning_rate": 0.0001121880208644678,
1543
+ "loss": 1.2718,
1544
+ "step": 256
1545
+ },
1546
+ {
1547
+ "epoch": 0.57,
1548
+ "learning_rate": 0.00011211682064429823,
1549
+ "loss": 1.2786,
1550
+ "step": 257
1551
+ },
1552
+ {
1553
+ "epoch": 0.57,
1554
+ "learning_rate": 0.00011204532023278979,
1555
+ "loss": 1.2636,
1556
+ "step": 258
1557
+ },
1558
+ {
1559
+ "epoch": 0.58,
1560
+ "learning_rate": 0.00011197352004178271,
1561
+ "loss": 1.2662,
1562
+ "step": 259
1563
+ },
1564
+ {
1565
+ "epoch": 0.58,
1566
+ "learning_rate": 0.00011190142048484403,
1567
+ "loss": 1.2616,
1568
+ "step": 260
1569
+ },
1570
+ {
1571
+ "epoch": 0.58,
1572
+ "learning_rate": 0.00011182902197726497,
1573
+ "loss": 1.2511,
1574
+ "step": 261
1575
+ },
1576
+ {
1577
+ "epoch": 0.58,
1578
+ "learning_rate": 0.00011175632493605883,
1579
+ "loss": 1.2794,
1580
+ "step": 262
1581
+ },
1582
+ {
1583
+ "epoch": 0.58,
1584
+ "learning_rate": 0.00011168332977995841,
1585
+ "loss": 1.2709,
1586
+ "step": 263
1587
+ },
1588
+ {
1589
+ "epoch": 0.59,
1590
+ "learning_rate": 0.00011161003692941364,
1591
+ "loss": 1.2583,
1592
+ "step": 264
1593
+ },
1594
+ {
1595
+ "epoch": 0.59,
1596
+ "learning_rate": 0.00011153644680658915,
1597
+ "loss": 1.2573,
1598
+ "step": 265
1599
+ },
1600
+ {
1601
+ "epoch": 0.59,
1602
+ "learning_rate": 0.00011146255983536184,
1603
+ "loss": 1.2251,
1604
+ "step": 266
1605
+ },
1606
+ {
1607
+ "epoch": 0.59,
1608
+ "learning_rate": 0.0001113883764413185,
1609
+ "loss": 1.2647,
1610
+ "step": 267
1611
+ },
1612
+ {
1613
+ "epoch": 0.6,
1614
+ "learning_rate": 0.00011131389705175328,
1615
+ "loss": 1.2252,
1616
+ "step": 268
1617
+ },
1618
+ {
1619
+ "epoch": 0.6,
1620
+ "learning_rate": 0.00011123912209566525,
1621
+ "loss": 1.2373,
1622
+ "step": 269
1623
+ },
1624
+ {
1625
+ "epoch": 0.6,
1626
+ "learning_rate": 0.00011116405200375591,
1627
+ "loss": 1.2447,
1628
+ "step": 270
1629
+ },
1630
+ {
1631
+ "epoch": 0.6,
1632
+ "learning_rate": 0.00011108868720842679,
1633
+ "loss": 1.2294,
1634
+ "step": 271
1635
+ },
1636
+ {
1637
+ "epoch": 0.6,
1638
+ "learning_rate": 0.00011101302814377686,
1639
+ "loss": 1.2497,
1640
+ "step": 272
1641
+ },
1642
+ {
1643
+ "epoch": 0.61,
1644
+ "learning_rate": 0.00011093707524560006,
1645
+ "loss": 1.2151,
1646
+ "step": 273
1647
+ },
1648
+ {
1649
+ "epoch": 0.61,
1650
+ "learning_rate": 0.00011086082895138288,
1651
+ "loss": 1.2533,
1652
+ "step": 274
1653
+ },
1654
+ {
1655
+ "epoch": 0.61,
1656
+ "learning_rate": 0.00011078428970030167,
1657
+ "loss": 1.2424,
1658
+ "step": 275
1659
+ },
1660
+ {
1661
+ "epoch": 0.61,
1662
+ "learning_rate": 0.00011070745793322026,
1663
+ "loss": 1.2656,
1664
+ "step": 276
1665
+ },
1666
+ {
1667
+ "epoch": 0.62,
1668
+ "learning_rate": 0.00011063033409268734,
1669
+ "loss": 1.2047,
1670
+ "step": 277
1671
+ },
1672
+ {
1673
+ "epoch": 0.62,
1674
+ "learning_rate": 0.00011055291862293394,
1675
+ "loss": 1.2705,
1676
+ "step": 278
1677
+ },
1678
+ {
1679
+ "epoch": 0.62,
1680
+ "learning_rate": 0.00011047521196987087,
1681
+ "loss": 1.2819,
1682
+ "step": 279
1683
+ },
1684
+ {
1685
+ "epoch": 0.62,
1686
+ "learning_rate": 0.00011039721458108616,
1687
+ "loss": 1.2561,
1688
+ "step": 280
1689
+ },
1690
+ {
1691
+ "epoch": 0.62,
1692
+ "learning_rate": 0.00011031892690584239,
1693
+ "loss": 1.2291,
1694
+ "step": 281
1695
+ },
1696
+ {
1697
+ "epoch": 0.63,
1698
+ "learning_rate": 0.00011024034939507433,
1699
+ "loss": 1.2234,
1700
+ "step": 282
1701
+ },
1702
+ {
1703
+ "epoch": 0.63,
1704
+ "learning_rate": 0.00011016148250138605,
1705
+ "loss": 1.247,
1706
+ "step": 283
1707
+ },
1708
+ {
1709
+ "epoch": 0.63,
1710
+ "learning_rate": 0.00011008232667904853,
1711
+ "loss": 1.2593,
1712
+ "step": 284
1713
+ },
1714
+ {
1715
+ "epoch": 0.63,
1716
+ "learning_rate": 0.00011000288238399695,
1717
+ "loss": 1.255,
1718
+ "step": 285
1719
+ },
1720
+ {
1721
+ "epoch": 0.64,
1722
+ "learning_rate": 0.0001099231500738281,
1723
+ "loss": 1.2471,
1724
+ "step": 286
1725
+ },
1726
+ {
1727
+ "epoch": 0.64,
1728
+ "learning_rate": 0.00010984313020779771,
1729
+ "loss": 1.2817,
1730
+ "step": 287
1731
+ },
1732
+ {
1733
+ "epoch": 0.64,
1734
+ "learning_rate": 0.00010976282324681785,
1735
+ "loss": 1.2424,
1736
+ "step": 288
1737
+ },
1738
+ {
1739
+ "epoch": 0.64,
1740
+ "learning_rate": 0.00010968222965345421,
1741
+ "loss": 1.2042,
1742
+ "step": 289
1743
+ },
1744
+ {
1745
+ "epoch": 0.64,
1746
+ "learning_rate": 0.0001096013498919235,
1747
+ "loss": 1.2324,
1748
+ "step": 290
1749
+ },
1750
+ {
1751
+ "epoch": 0.65,
1752
+ "learning_rate": 0.00010952018442809074,
1753
+ "loss": 1.2063,
1754
+ "step": 291
1755
+ },
1756
+ {
1757
+ "epoch": 0.65,
1758
+ "learning_rate": 0.00010943873372946661,
1759
+ "loss": 1.2345,
1760
+ "step": 292
1761
+ },
1762
+ {
1763
+ "epoch": 0.65,
1764
+ "learning_rate": 0.00010935699826520467,
1765
+ "loss": 1.2373,
1766
+ "step": 293
1767
+ },
1768
+ {
1769
+ "epoch": 0.65,
1770
+ "learning_rate": 0.00010927497850609882,
1771
+ "loss": 1.2694,
1772
+ "step": 294
1773
+ },
1774
+ {
1775
+ "epoch": 0.66,
1776
+ "learning_rate": 0.00010919267492458041,
1777
+ "loss": 1.2393,
1778
+ "step": 295
1779
+ },
1780
+ {
1781
+ "epoch": 0.66,
1782
+ "learning_rate": 0.00010911008799471562,
1783
+ "loss": 1.2307,
1784
+ "step": 296
1785
+ },
1786
+ {
1787
+ "epoch": 0.66,
1788
+ "learning_rate": 0.00010902721819220271,
1789
+ "loss": 1.2317,
1790
+ "step": 297
1791
+ },
1792
+ {
1793
+ "epoch": 0.66,
1794
+ "learning_rate": 0.0001089440659943693,
1795
+ "loss": 1.2129,
1796
+ "step": 298
1797
+ },
1798
+ {
1799
+ "epoch": 0.66,
1800
+ "learning_rate": 0.00010886063188016958,
1801
+ "loss": 1.2341,
1802
+ "step": 299
1803
+ },
1804
+ {
1805
+ "epoch": 0.67,
1806
+ "learning_rate": 0.00010877691633018154,
1807
+ "loss": 1.2456,
1808
+ "step": 300
1809
+ },
1810
+ {
1811
+ "epoch": 0.67,
1812
+ "learning_rate": 0.0001086929198266043,
1813
+ "loss": 1.2511,
1814
+ "step": 301
1815
+ },
1816
+ {
1817
+ "epoch": 0.67,
1818
+ "learning_rate": 0.0001086086428532552,
1819
+ "loss": 1.247,
1820
+ "step": 302
1821
+ },
1822
+ {
1823
+ "epoch": 0.67,
1824
+ "learning_rate": 0.0001085240858955671,
1825
+ "loss": 1.2702,
1826
+ "step": 303
1827
+ },
1828
+ {
1829
+ "epoch": 0.68,
1830
+ "learning_rate": 0.00010843924944058557,
1831
+ "loss": 1.2601,
1832
+ "step": 304
1833
+ },
1834
+ {
1835
+ "epoch": 0.68,
1836
+ "learning_rate": 0.0001083541339769661,
1837
+ "loss": 1.1809,
1838
+ "step": 305
1839
+ },
1840
+ {
1841
+ "epoch": 0.68,
1842
+ "learning_rate": 0.00010826873999497118,
1843
+ "loss": 1.2388,
1844
+ "step": 306
1845
+ },
1846
+ {
1847
+ "epoch": 0.68,
1848
+ "learning_rate": 0.00010818306798646766,
1849
+ "loss": 1.2608,
1850
+ "step": 307
1851
+ },
1852
+ {
1853
+ "epoch": 0.68,
1854
+ "learning_rate": 0.00010809711844492373,
1855
+ "loss": 1.2479,
1856
+ "step": 308
1857
+ },
1858
+ {
1859
+ "epoch": 0.69,
1860
+ "learning_rate": 0.00010801089186540621,
1861
+ "loss": 1.2148,
1862
+ "step": 309
1863
+ },
1864
+ {
1865
+ "epoch": 0.69,
1866
+ "learning_rate": 0.00010792438874457763,
1867
+ "loss": 1.225,
1868
+ "step": 310
1869
+ },
1870
+ {
1871
+ "epoch": 0.69,
1872
+ "learning_rate": 0.00010783760958069341,
1873
+ "loss": 1.2321,
1874
+ "step": 311
1875
+ },
1876
+ {
1877
+ "epoch": 0.69,
1878
+ "learning_rate": 0.00010775055487359894,
1879
+ "loss": 1.2556,
1880
+ "step": 312
1881
+ },
1882
+ {
1883
+ "epoch": 0.7,
1884
+ "learning_rate": 0.00010766322512472675,
1885
+ "loss": 1.2122,
1886
+ "step": 313
1887
+ },
1888
+ {
1889
+ "epoch": 0.7,
1890
+ "learning_rate": 0.00010757562083709362,
1891
+ "loss": 1.2606,
1892
+ "step": 314
1893
+ },
1894
+ {
1895
+ "epoch": 0.7,
1896
+ "learning_rate": 0.00010748774251529763,
1897
+ "loss": 1.2528,
1898
+ "step": 315
1899
+ },
1900
+ {
1901
+ "epoch": 0.7,
1902
+ "learning_rate": 0.00010739959066551528,
1903
+ "loss": 1.2228,
1904
+ "step": 316
1905
+ },
1906
+ {
1907
+ "epoch": 0.7,
1908
+ "learning_rate": 0.00010731116579549864,
1909
+ "loss": 1.1934,
1910
+ "step": 317
1911
+ },
1912
+ {
1913
+ "epoch": 0.71,
1914
+ "learning_rate": 0.00010722246841457232,
1915
+ "loss": 1.2169,
1916
+ "step": 318
1917
+ },
1918
+ {
1919
+ "epoch": 0.71,
1920
+ "learning_rate": 0.0001071334990336306,
1921
+ "loss": 1.2657,
1922
+ "step": 319
1923
+ },
1924
+ {
1925
+ "epoch": 0.71,
1926
+ "learning_rate": 0.0001070442581651345,
1927
+ "loss": 1.2457,
1928
+ "step": 320
1929
+ },
1930
+ {
1931
+ "epoch": 0.71,
1932
+ "learning_rate": 0.00010695474632310871,
1933
+ "loss": 1.2288,
1934
+ "step": 321
1935
+ },
1936
+ {
1937
+ "epoch": 0.72,
1938
+ "learning_rate": 0.00010686496402313882,
1939
+ "loss": 1.2224,
1940
+ "step": 322
1941
+ },
1942
+ {
1943
+ "epoch": 0.72,
1944
+ "learning_rate": 0.00010677491178236823,
1945
+ "loss": 1.2083,
1946
+ "step": 323
1947
+ },
1948
+ {
1949
+ "epoch": 0.72,
1950
+ "learning_rate": 0.00010668459011949512,
1951
+ "loss": 1.2031,
1952
+ "step": 324
1953
+ },
1954
+ {
1955
+ "epoch": 0.72,
1956
+ "learning_rate": 0.00010659399955476964,
1957
+ "loss": 1.2406,
1958
+ "step": 325
1959
+ },
1960
+ {
1961
+ "epoch": 0.72,
1962
+ "learning_rate": 0.00010650314060999073,
1963
+ "loss": 1.1586,
1964
+ "step": 326
1965
+ },
1966
+ {
1967
+ "epoch": 0.73,
1968
+ "learning_rate": 0.00010641201380850319,
1969
+ "loss": 1.2393,
1970
+ "step": 327
1971
+ },
1972
+ {
1973
+ "epoch": 0.73,
1974
+ "learning_rate": 0.00010632061967519473,
1975
+ "loss": 1.1987,
1976
+ "step": 328
1977
+ },
1978
+ {
1979
+ "epoch": 0.73,
1980
+ "learning_rate": 0.00010622895873649281,
1981
+ "loss": 1.1982,
1982
+ "step": 329
1983
+ },
1984
+ {
1985
+ "epoch": 0.73,
1986
+ "learning_rate": 0.00010613703152036172,
1987
+ "loss": 1.2368,
1988
+ "step": 330
1989
+ },
1990
+ {
1991
+ "epoch": 0.74,
1992
+ "learning_rate": 0.00010604483855629952,
1993
+ "loss": 1.2315,
1994
+ "step": 331
1995
+ },
1996
+ {
1997
+ "epoch": 0.74,
1998
+ "learning_rate": 0.00010595238037533491,
1999
+ "loss": 1.1949,
2000
+ "step": 332
2001
+ },
2002
+ {
2003
+ "epoch": 0.74,
2004
+ "learning_rate": 0.0001058596575100243,
2005
+ "loss": 1.2115,
2006
+ "step": 333
2007
+ },
2008
+ {
2009
+ "epoch": 0.74,
2010
+ "learning_rate": 0.00010576667049444861,
2011
+ "loss": 1.2225,
2012
+ "step": 334
2013
+ },
2014
+ {
2015
+ "epoch": 0.74,
2016
+ "learning_rate": 0.0001056734198642103,
2017
+ "loss": 1.2243,
2018
+ "step": 335
2019
+ },
2020
+ {
2021
+ "epoch": 0.75,
2022
+ "learning_rate": 0.00010557990615643023,
2023
+ "loss": 1.2686,
2024
+ "step": 336
2025
+ },
2026
+ {
2027
+ "epoch": 0.75,
2028
+ "learning_rate": 0.00010548612990974458,
2029
+ "loss": 1.2224,
2030
+ "step": 337
2031
+ },
2032
+ {
2033
+ "epoch": 0.75,
2034
+ "learning_rate": 0.00010539209166430176,
2035
+ "loss": 1.2389,
2036
+ "step": 338
2037
+ },
2038
+ {
2039
+ "epoch": 0.75,
2040
+ "learning_rate": 0.00010529779196175924,
2041
+ "loss": 1.2085,
2042
+ "step": 339
2043
+ },
2044
+ {
2045
+ "epoch": 0.76,
2046
+ "learning_rate": 0.00010520323134528051,
2047
+ "loss": 1.2501,
2048
+ "step": 340
2049
+ },
2050
+ {
2051
+ "epoch": 0.76,
2052
+ "learning_rate": 0.00010510841035953194,
2053
+ "loss": 1.2202,
2054
+ "step": 341
2055
+ },
2056
+ {
2057
+ "epoch": 0.76,
2058
+ "learning_rate": 0.00010501332955067958,
2059
+ "loss": 1.2156,
2060
+ "step": 342
2061
+ },
2062
+ {
2063
+ "epoch": 0.76,
2064
+ "learning_rate": 0.00010491798946638606,
2065
+ "loss": 1.2211,
2066
+ "step": 343
2067
+ },
2068
+ {
2069
+ "epoch": 0.76,
2070
+ "learning_rate": 0.00010482239065580742,
2071
+ "loss": 1.2209,
2072
+ "step": 344
2073
+ },
2074
+ {
2075
+ "epoch": 0.77,
2076
+ "learning_rate": 0.00010472653366958998,
2077
+ "loss": 1.2249,
2078
+ "step": 345
2079
+ },
2080
+ {
2081
+ "epoch": 0.77,
2082
+ "learning_rate": 0.00010463041905986715,
2083
+ "loss": 1.1985,
2084
+ "step": 346
2085
+ },
2086
+ {
2087
+ "epoch": 0.77,
2088
+ "learning_rate": 0.0001045340473802562,
2089
+ "loss": 1.2689,
2090
+ "step": 347
2091
+ },
2092
+ {
2093
+ "epoch": 0.77,
2094
+ "learning_rate": 0.00010443741918585517,
2095
+ "loss": 1.2237,
2096
+ "step": 348
2097
+ },
2098
+ {
2099
+ "epoch": 0.78,
2100
+ "learning_rate": 0.00010434053503323955,
2101
+ "loss": 1.2322,
2102
+ "step": 349
2103
+ },
2104
+ {
2105
+ "epoch": 0.78,
2106
+ "learning_rate": 0.00010424339548045921,
2107
+ "loss": 1.195,
2108
+ "step": 350
2109
+ },
2110
+ {
2111
+ "epoch": 0.78,
2112
+ "learning_rate": 0.0001041460010870351,
2113
+ "loss": 1.1922,
2114
+ "step": 351
2115
+ },
2116
+ {
2117
+ "epoch": 0.78,
2118
+ "learning_rate": 0.00010404835241395601,
2119
+ "loss": 1.2476,
2120
+ "step": 352
2121
+ },
2122
+ {
2123
+ "epoch": 0.78,
2124
+ "learning_rate": 0.00010395045002367541,
2125
+ "loss": 1.2289,
2126
+ "step": 353
2127
+ },
2128
+ {
2129
+ "epoch": 0.79,
2130
+ "learning_rate": 0.00010385229448010814,
2131
+ "loss": 1.249,
2132
+ "step": 354
2133
+ },
2134
+ {
2135
+ "epoch": 0.79,
2136
+ "learning_rate": 0.00010375388634862723,
2137
+ "loss": 1.2138,
2138
+ "step": 355
2139
+ },
2140
+ {
2141
+ "epoch": 0.79,
2142
+ "learning_rate": 0.00010365522619606062,
2143
+ "loss": 1.1953,
2144
+ "step": 356
2145
+ },
2146
+ {
2147
+ "epoch": 0.79,
2148
+ "learning_rate": 0.00010355631459068779,
2149
+ "loss": 1.2433,
2150
+ "step": 357
2151
+ },
2152
+ {
2153
+ "epoch": 0.8,
2154
+ "learning_rate": 0.00010345715210223671,
2155
+ "loss": 1.1922,
2156
+ "step": 358
2157
+ },
2158
+ {
2159
+ "epoch": 0.8,
2160
+ "learning_rate": 0.00010335773930188036,
2161
+ "loss": 1.2071,
2162
+ "step": 359
2163
+ },
2164
+ {
2165
+ "epoch": 0.8,
2166
+ "learning_rate": 0.0001032580767622335,
2167
+ "loss": 1.2189,
2168
+ "step": 360
2169
+ },
2170
+ {
2171
+ "epoch": 0.8,
2172
+ "learning_rate": 0.0001031581650573494,
2173
+ "loss": 1.2081,
2174
+ "step": 361
2175
+ },
2176
+ {
2177
+ "epoch": 0.8,
2178
+ "learning_rate": 0.00010305800476271651,
2179
+ "loss": 1.2028,
2180
+ "step": 362
2181
+ },
2182
+ {
2183
+ "epoch": 0.81,
2184
+ "learning_rate": 0.00010295759645525515,
2185
+ "loss": 1.2179,
2186
+ "step": 363
2187
+ },
2188
+ {
2189
+ "epoch": 0.81,
2190
+ "learning_rate": 0.00010285694071331422,
2191
+ "loss": 1.2041,
2192
+ "step": 364
2193
+ },
2194
+ {
2195
+ "epoch": 0.81,
2196
+ "learning_rate": 0.00010275603811666778,
2197
+ "loss": 1.2169,
2198
+ "step": 365
2199
+ },
2200
+ {
2201
+ "epoch": 0.81,
2202
+ "learning_rate": 0.00010265488924651176,
2203
+ "loss": 1.2272,
2204
+ "step": 366
2205
+ },
2206
+ {
2207
+ "epoch": 0.82,
2208
+ "learning_rate": 0.00010255349468546072,
2209
+ "loss": 1.2104,
2210
+ "step": 367
2211
+ },
2212
+ {
2213
+ "epoch": 0.82,
2214
+ "learning_rate": 0.00010245185501754425,
2215
+ "loss": 1.2381,
2216
+ "step": 368
2217
+ },
2218
+ {
2219
+ "epoch": 0.82,
2220
+ "learning_rate": 0.00010234997082820383,
2221
+ "loss": 1.2231,
2222
+ "step": 369
2223
+ },
2224
+ {
2225
+ "epoch": 0.82,
2226
+ "learning_rate": 0.00010224784270428942,
2227
+ "loss": 1.2252,
2228
+ "step": 370
2229
+ },
2230
+ {
2231
+ "epoch": 0.82,
2232
+ "learning_rate": 0.00010214547123405592,
2233
+ "loss": 1.2611,
2234
+ "step": 371
2235
+ },
2236
+ {
2237
+ "epoch": 0.83,
2238
+ "learning_rate": 0.00010204285700715998,
2239
+ "loss": 1.2557,
2240
+ "step": 372
2241
+ },
2242
+ {
2243
+ "epoch": 0.83,
2244
+ "learning_rate": 0.00010194000061465648,
2245
+ "loss": 1.2176,
2246
+ "step": 373
2247
+ },
2248
+ {
2249
+ "epoch": 0.83,
2250
+ "learning_rate": 0.00010183690264899521,
2251
+ "loss": 1.2211,
2252
+ "step": 374
2253
+ },
2254
+ {
2255
+ "epoch": 0.83,
2256
+ "learning_rate": 0.00010173356370401741,
2257
+ "loss": 1.2117,
2258
+ "step": 375
2259
+ },
2260
+ {
2261
+ "epoch": 0.84,
2262
+ "learning_rate": 0.00010162998437495228,
2263
+ "loss": 1.2404,
2264
+ "step": 376
2265
+ },
2266
+ {
2267
+ "epoch": 0.84,
2268
+ "learning_rate": 0.0001015261652584137,
2269
+ "loss": 1.2181,
2270
+ "step": 377
2271
+ },
2272
+ {
2273
+ "epoch": 0.84,
2274
+ "learning_rate": 0.0001014221069523967,
2275
+ "loss": 1.2391,
2276
+ "step": 378
2277
+ },
2278
+ {
2279
+ "epoch": 0.84,
2280
+ "learning_rate": 0.00010131781005627406,
2281
+ "loss": 1.2204,
2282
+ "step": 379
2283
+ },
2284
+ {
2285
+ "epoch": 0.84,
2286
+ "learning_rate": 0.00010121327517079276,
2287
+ "loss": 1.2545,
2288
+ "step": 380
2289
+ },
2290
+ {
2291
+ "epoch": 0.85,
2292
+ "learning_rate": 0.00010110850289807066,
2293
+ "loss": 1.2036,
2294
+ "step": 381
2295
+ },
2296
+ {
2297
+ "epoch": 0.85,
2298
+ "learning_rate": 0.00010100349384159291,
2299
+ "loss": 1.2642,
2300
+ "step": 382
2301
+ },
2302
+ {
2303
+ "epoch": 0.85,
2304
+ "learning_rate": 0.00010089824860620861,
2305
+ "loss": 1.2331,
2306
+ "step": 383
2307
+ },
2308
+ {
2309
+ "epoch": 0.85,
2310
+ "learning_rate": 0.00010079276779812714,
2311
+ "loss": 1.2063,
2312
+ "step": 384
2313
+ },
2314
+ {
2315
+ "epoch": 0.86,
2316
+ "learning_rate": 0.00010068705202491485,
2317
+ "loss": 1.1969,
2318
+ "step": 385
2319
+ },
2320
+ {
2321
+ "epoch": 0.86,
2322
+ "learning_rate": 0.00010058110189549143,
2323
+ "loss": 1.2161,
2324
+ "step": 386
2325
+ },
2326
+ {
2327
+ "epoch": 0.86,
2328
+ "learning_rate": 0.00010047491802012648,
2329
+ "loss": 1.2371,
2330
+ "step": 387
2331
+ },
2332
+ {
2333
+ "epoch": 0.86,
2334
+ "learning_rate": 0.000100368501010436,
2335
+ "loss": 1.2154,
2336
+ "step": 388
2337
+ },
2338
+ {
2339
+ "epoch": 0.86,
2340
+ "learning_rate": 0.00010026185147937877,
2341
+ "loss": 1.2139,
2342
+ "step": 389
2343
+ },
2344
+ {
2345
+ "epoch": 0.87,
2346
+ "learning_rate": 0.00010015497004125293,
2347
+ "loss": 1.2437,
2348
+ "step": 390
2349
+ },
2350
+ {
2351
+ "epoch": 0.87,
2352
+ "learning_rate": 0.00010004785731169242,
2353
+ "loss": 1.2077,
2354
+ "step": 391
2355
+ },
2356
+ {
2357
+ "epoch": 0.87,
2358
+ "learning_rate": 9.994051390766333e-05,
2359
+ "loss": 1.2164,
2360
+ "step": 392
2361
+ },
2362
+ {
2363
+ "epoch": 0.87,
2364
+ "learning_rate": 9.983294044746051e-05,
2365
+ "loss": 1.2177,
2366
+ "step": 393
2367
+ },
2368
+ {
2369
+ "epoch": 0.88,
2370
+ "learning_rate": 9.97251375507039e-05,
2371
+ "loss": 1.2081,
2372
+ "step": 394
2373
+ },
2374
+ {
2375
+ "epoch": 0.88,
2376
+ "learning_rate": 9.961710583833494e-05,
2377
+ "loss": 1.2373,
2378
+ "step": 395
2379
+ },
2380
+ {
2381
+ "epoch": 0.88,
2382
+ "learning_rate": 9.950884593261315e-05,
2383
+ "loss": 1.218,
2384
+ "step": 396
2385
+ },
2386
+ {
2387
+ "epoch": 0.88,
2388
+ "learning_rate": 9.940035845711232e-05,
2389
+ "loss": 1.24,
2390
+ "step": 397
2391
+ },
2392
+ {
2393
+ "epoch": 0.88,
2394
+ "learning_rate": 9.929164403671711e-05,
2395
+ "loss": 1.1771,
2396
+ "step": 398
2397
+ },
2398
+ {
2399
+ "epoch": 0.89,
2400
+ "learning_rate": 9.918270329761933e-05,
2401
+ "loss": 1.2131,
2402
+ "step": 399
2403
+ },
2404
+ {
2405
+ "epoch": 0.89,
2406
+ "learning_rate": 9.907353686731444e-05,
2407
+ "loss": 1.2335,
2408
+ "step": 400
2409
+ },
2410
+ {
2411
+ "epoch": 0.89,
2412
+ "learning_rate": 9.89641453745978e-05,
2413
+ "loss": 1.2427,
2414
+ "step": 401
2415
+ },
2416
+ {
2417
+ "epoch": 0.89,
2418
+ "learning_rate": 9.885452944956118e-05,
2419
+ "loss": 1.2184,
2420
+ "step": 402
2421
+ },
2422
+ {
2423
+ "epoch": 0.9,
2424
+ "learning_rate": 9.874468972358904e-05,
2425
+ "loss": 1.2157,
2426
+ "step": 403
2427
+ },
2428
+ {
2429
+ "epoch": 0.9,
2430
+ "learning_rate": 9.863462682935493e-05,
2431
+ "loss": 1.2399,
2432
+ "step": 404
2433
+ },
2434
+ {
2435
+ "epoch": 0.9,
2436
+ "learning_rate": 9.852434140081789e-05,
2437
+ "loss": 1.221,
2438
+ "step": 405
2439
+ },
2440
+ {
2441
+ "epoch": 0.9,
2442
+ "learning_rate": 9.841383407321866e-05,
2443
+ "loss": 1.2568,
2444
+ "step": 406
2445
+ },
2446
+ {
2447
+ "epoch": 0.9,
2448
+ "learning_rate": 9.830310548307622e-05,
2449
+ "loss": 1.2178,
2450
+ "step": 407
2451
+ },
2452
+ {
2453
+ "epoch": 0.91,
2454
+ "learning_rate": 9.819215626818392e-05,
2455
+ "loss": 1.2101,
2456
+ "step": 408
2457
+ },
2458
+ {
2459
+ "epoch": 0.91,
2460
+ "learning_rate": 9.808098706760595e-05,
2461
+ "loss": 1.212,
2462
+ "step": 409
2463
+ },
2464
+ {
2465
+ "epoch": 0.91,
2466
+ "learning_rate": 9.796959852167363e-05,
2467
+ "loss": 1.2028,
2468
+ "step": 410
2469
+ },
2470
+ {
2471
+ "epoch": 0.91,
2472
+ "learning_rate": 9.785799127198162e-05,
2473
+ "loss": 1.1922,
2474
+ "step": 411
2475
+ },
2476
+ {
2477
+ "epoch": 0.92,
2478
+ "learning_rate": 9.77461659613844e-05,
2479
+ "loss": 1.1668,
2480
+ "step": 412
2481
+ },
2482
+ {
2483
+ "epoch": 0.92,
2484
+ "learning_rate": 9.763412323399245e-05,
2485
+ "loss": 1.1926,
2486
+ "step": 413
2487
+ },
2488
+ {
2489
+ "epoch": 0.92,
2490
+ "learning_rate": 9.752186373516853e-05,
2491
+ "loss": 1.2085,
2492
+ "step": 414
2493
+ },
2494
+ {
2495
+ "epoch": 0.92,
2496
+ "learning_rate": 9.740938811152401e-05,
2497
+ "loss": 1.2137,
2498
+ "step": 415
2499
+ },
2500
+ {
2501
+ "epoch": 0.92,
2502
+ "learning_rate": 9.729669701091517e-05,
2503
+ "loss": 1.2701,
2504
+ "step": 416
2505
+ },
2506
+ {
2507
+ "epoch": 0.93,
2508
+ "learning_rate": 9.718379108243939e-05,
2509
+ "loss": 1.2591,
2510
+ "step": 417
2511
+ },
2512
+ {
2513
+ "epoch": 0.93,
2514
+ "learning_rate": 9.707067097643147e-05,
2515
+ "loss": 1.2277,
2516
+ "step": 418
2517
+ },
2518
+ {
2519
+ "epoch": 0.93,
2520
+ "learning_rate": 9.695733734445982e-05,
2521
+ "loss": 1.2128,
2522
+ "step": 419
2523
+ },
2524
+ {
2525
+ "epoch": 0.93,
2526
+ "learning_rate": 9.684379083932286e-05,
2527
+ "loss": 1.2091,
2528
+ "step": 420
2529
+ },
2530
+ {
2531
+ "epoch": 0.94,
2532
+ "learning_rate": 9.673003211504503e-05,
2533
+ "loss": 1.2067,
2534
+ "step": 421
2535
+ },
2536
+ {
2537
+ "epoch": 0.94,
2538
+ "learning_rate": 9.661606182687324e-05,
2539
+ "loss": 1.191,
2540
+ "step": 422
2541
+ },
2542
+ {
2543
+ "epoch": 0.94,
2544
+ "learning_rate": 9.650188063127296e-05,
2545
+ "loss": 1.1973,
2546
+ "step": 423
2547
+ },
2548
+ {
2549
+ "epoch": 0.94,
2550
+ "learning_rate": 9.638748918592445e-05,
2551
+ "loss": 1.2405,
2552
+ "step": 424
2553
+ },
2554
+ {
2555
+ "epoch": 0.94,
2556
+ "learning_rate": 9.627288814971908e-05,
2557
+ "loss": 1.2487,
2558
+ "step": 425
2559
+ },
2560
+ {
2561
+ "epoch": 0.95,
2562
+ "learning_rate": 9.615807818275539e-05,
2563
+ "loss": 1.1887,
2564
+ "step": 426
2565
+ },
2566
+ {
2567
+ "epoch": 0.95,
2568
+ "learning_rate": 9.604305994633539e-05,
2569
+ "loss": 1.2214,
2570
+ "step": 427
2571
+ },
2572
+ {
2573
+ "epoch": 0.95,
2574
+ "learning_rate": 9.592783410296071e-05,
2575
+ "loss": 1.2136,
2576
+ "step": 428
2577
+ },
2578
+ {
2579
+ "epoch": 0.95,
2580
+ "learning_rate": 9.581240131632876e-05,
2581
+ "loss": 1.2074,
2582
+ "step": 429
2583
+ },
2584
+ {
2585
+ "epoch": 0.96,
2586
+ "learning_rate": 9.569676225132898e-05,
2587
+ "loss": 1.1687,
2588
+ "step": 430
2589
+ },
2590
+ {
2591
+ "epoch": 0.96,
2592
+ "learning_rate": 9.558091757403897e-05,
2593
+ "loss": 1.1988,
2594
+ "step": 431
2595
+ },
2596
+ {
2597
+ "epoch": 0.96,
2598
+ "learning_rate": 9.54648679517206e-05,
2599
+ "loss": 1.2025,
2600
+ "step": 432
2601
+ },
2602
+ {
2603
+ "epoch": 0.96,
2604
+ "learning_rate": 9.534861405281625e-05,
2605
+ "loss": 1.2027,
2606
+ "step": 433
2607
+ },
2608
+ {
2609
+ "epoch": 0.96,
2610
+ "learning_rate": 9.523215654694493e-05,
2611
+ "loss": 1.2192,
2612
+ "step": 434
2613
+ },
2614
+ {
2615
+ "epoch": 0.97,
2616
+ "learning_rate": 9.511549610489844e-05,
2617
+ "loss": 1.2245,
2618
+ "step": 435
2619
+ },
2620
+ {
2621
+ "epoch": 0.97,
2622
+ "learning_rate": 9.499863339863741e-05,
2623
+ "loss": 1.1959,
2624
+ "step": 436
2625
+ },
2626
+ {
2627
+ "epoch": 0.97,
2628
+ "learning_rate": 9.48815691012876e-05,
2629
+ "loss": 1.2241,
2630
+ "step": 437
2631
+ },
2632
+ {
2633
+ "epoch": 0.97,
2634
+ "learning_rate": 9.476430388713586e-05,
2635
+ "loss": 1.2185,
2636
+ "step": 438
2637
+ },
2638
+ {
2639
+ "epoch": 0.98,
2640
+ "learning_rate": 9.464683843162635e-05,
2641
+ "loss": 1.202,
2642
+ "step": 439
2643
+ },
2644
+ {
2645
+ "epoch": 0.98,
2646
+ "learning_rate": 9.45291734113566e-05,
2647
+ "loss": 1.2031,
2648
+ "step": 440
2649
+ },
2650
+ {
2651
+ "epoch": 0.98,
2652
+ "learning_rate": 9.441130950407367e-05,
2653
+ "loss": 1.2341,
2654
+ "step": 441
2655
+ },
2656
+ {
2657
+ "epoch": 0.98,
2658
+ "learning_rate": 9.42932473886701e-05,
2659
+ "loss": 1.2146,
2660
+ "step": 442
2661
+ },
2662
+ {
2663
+ "epoch": 0.98,
2664
+ "learning_rate": 9.417498774518019e-05,
2665
+ "loss": 1.2221,
2666
+ "step": 443
2667
+ },
2668
+ {
2669
+ "epoch": 0.99,
2670
+ "learning_rate": 9.4056531254776e-05,
2671
+ "loss": 1.2147,
2672
+ "step": 444
2673
+ },
2674
+ {
2675
+ "epoch": 0.99,
2676
+ "learning_rate": 9.393787859976338e-05,
2677
+ "loss": 1.229,
2678
+ "step": 445
2679
+ },
2680
+ {
2681
+ "epoch": 0.99,
2682
+ "learning_rate": 9.381903046357809e-05,
2683
+ "loss": 1.2305,
2684
+ "step": 446
2685
+ },
2686
+ {
2687
+ "epoch": 0.99,
2688
+ "learning_rate": 9.369998753078188e-05,
2689
+ "loss": 1.2403,
2690
+ "step": 447
2691
+ },
2692
+ {
2693
+ "epoch": 1.0,
2694
+ "learning_rate": 9.35807504870585e-05,
2695
+ "loss": 1.2102,
2696
+ "step": 448
2697
+ },
2698
+ {
2699
+ "epoch": 1.0,
2700
+ "learning_rate": 9.346132001920977e-05,
2701
+ "loss": 1.1927,
2702
+ "step": 449
2703
+ },
2704
+ {
2705
+ "epoch": 1.0,
2706
+ "learning_rate": 9.334169681515164e-05,
2707
+ "loss": 1.1759,
2708
+ "step": 450
2709
+ },
2710
+ {
2711
+ "epoch": 1.0,
2712
+ "learning_rate": 9.322188156391023e-05,
2713
+ "loss": 0.9853,
2714
+ "step": 451
2715
+ },
2716
+ {
2717
+ "epoch": 1.0,
2718
+ "learning_rate": 9.31018749556178e-05,
2719
+ "loss": 0.9395,
2720
+ "step": 452
2721
+ },
2722
+ {
2723
+ "epoch": 1.01,
2724
+ "learning_rate": 9.29816776815089e-05,
2725
+ "loss": 0.9399,
2726
+ "step": 453
2727
+ },
2728
+ {
2729
+ "epoch": 1.01,
2730
+ "learning_rate": 9.28612904339162e-05,
2731
+ "loss": 0.8974,
2732
+ "step": 454
2733
+ },
2734
+ {
2735
+ "epoch": 1.01,
2736
+ "learning_rate": 9.27407139062667e-05,
2737
+ "loss": 0.9517,
2738
+ "step": 455
2739
+ },
2740
+ {
2741
+ "epoch": 1.01,
2742
+ "learning_rate": 9.261994879307761e-05,
2743
+ "loss": 0.889,
2744
+ "step": 456
2745
+ },
2746
+ {
2747
+ "epoch": 1.02,
2748
+ "learning_rate": 9.24989957899524e-05,
2749
+ "loss": 0.9045,
2750
+ "step": 457
2751
+ },
2752
+ {
2753
+ "epoch": 1.02,
2754
+ "learning_rate": 9.237785559357675e-05,
2755
+ "loss": 0.8965,
2756
+ "step": 458
2757
+ },
2758
+ {
2759
+ "epoch": 1.02,
2760
+ "learning_rate": 9.225652890171464e-05,
2761
+ "loss": 0.9576,
2762
+ "step": 459
2763
+ },
2764
+ {
2765
+ "epoch": 1.02,
2766
+ "learning_rate": 9.213501641320418e-05,
2767
+ "loss": 0.9374,
2768
+ "step": 460
2769
+ },
2770
+ {
2771
+ "epoch": 1.02,
2772
+ "learning_rate": 9.20133188279537e-05,
2773
+ "loss": 0.8864,
2774
+ "step": 461
2775
+ },
2776
+ {
2777
+ "epoch": 1.03,
2778
+ "learning_rate": 9.189143684693768e-05,
2779
+ "loss": 0.8912,
2780
+ "step": 462
2781
+ },
2782
+ {
2783
+ "epoch": 1.03,
2784
+ "learning_rate": 9.176937117219272e-05,
2785
+ "loss": 0.8981,
2786
+ "step": 463
2787
+ },
2788
+ {
2789
+ "epoch": 1.03,
2790
+ "learning_rate": 9.164712250681344e-05,
2791
+ "loss": 0.892,
2792
+ "step": 464
2793
+ },
2794
+ {
2795
+ "epoch": 1.03,
2796
+ "learning_rate": 9.152469155494857e-05,
2797
+ "loss": 0.8896,
2798
+ "step": 465
2799
+ },
2800
+ {
2801
+ "epoch": 1.04,
2802
+ "learning_rate": 9.140207902179673e-05,
2803
+ "loss": 0.9112,
2804
+ "step": 466
2805
+ },
2806
+ {
2807
+ "epoch": 1.04,
2808
+ "learning_rate": 9.127928561360246e-05,
2809
+ "loss": 0.8909,
2810
+ "step": 467
2811
+ },
2812
+ {
2813
+ "epoch": 1.04,
2814
+ "learning_rate": 9.115631203765218e-05,
2815
+ "loss": 0.9034,
2816
+ "step": 468
2817
+ },
2818
+ {
2819
+ "epoch": 1.04,
2820
+ "learning_rate": 9.103315900226999e-05,
2821
+ "loss": 0.903,
2822
+ "step": 469
2823
+ },
2824
+ {
2825
+ "epoch": 1.04,
2826
+ "learning_rate": 9.090982721681376e-05,
2827
+ "loss": 0.9069,
2828
+ "step": 470
2829
+ },
2830
+ {
2831
+ "epoch": 1.05,
2832
+ "learning_rate": 9.07863173916709e-05,
2833
+ "loss": 0.877,
2834
+ "step": 471
2835
+ },
2836
+ {
2837
+ "epoch": 1.05,
2838
+ "learning_rate": 9.06626302382543e-05,
2839
+ "loss": 0.874,
2840
+ "step": 472
2841
+ },
2842
+ {
2843
+ "epoch": 1.05,
2844
+ "learning_rate": 9.05387664689983e-05,
2845
+ "loss": 0.8924,
2846
+ "step": 473
2847
+ },
2848
+ {
2849
+ "epoch": 1.05,
2850
+ "learning_rate": 9.041472679735459e-05,
2851
+ "loss": 0.9288,
2852
+ "step": 474
2853
+ },
2854
+ {
2855
+ "epoch": 1.06,
2856
+ "learning_rate": 9.029051193778793e-05,
2857
+ "loss": 0.8858,
2858
+ "step": 475
2859
+ },
2860
+ {
2861
+ "epoch": 1.06,
2862
+ "learning_rate": 9.016612260577223e-05,
2863
+ "loss": 0.869,
2864
+ "step": 476
2865
+ },
2866
+ {
2867
+ "epoch": 1.06,
2868
+ "learning_rate": 9.004155951778635e-05,
2869
+ "loss": 0.8812,
2870
+ "step": 477
2871
+ },
2872
+ {
2873
+ "epoch": 1.06,
2874
+ "learning_rate": 8.991682339130999e-05,
2875
+ "loss": 0.8824,
2876
+ "step": 478
2877
+ },
2878
+ {
2879
+ "epoch": 1.06,
2880
+ "learning_rate": 8.979191494481956e-05,
2881
+ "loss": 0.891,
2882
+ "step": 479
2883
+ },
2884
+ {
2885
+ "epoch": 1.07,
2886
+ "learning_rate": 8.966683489778394e-05,
2887
+ "loss": 0.8898,
2888
+ "step": 480
2889
+ },
2890
+ {
2891
+ "epoch": 1.07,
2892
+ "learning_rate": 8.954158397066053e-05,
2893
+ "loss": 0.8971,
2894
+ "step": 481
2895
+ },
2896
+ {
2897
+ "epoch": 1.07,
2898
+ "learning_rate": 8.941616288489093e-05,
2899
+ "loss": 0.8956,
2900
+ "step": 482
2901
+ },
2902
+ {
2903
+ "epoch": 1.07,
2904
+ "learning_rate": 8.929057236289687e-05,
2905
+ "loss": 0.903,
2906
+ "step": 483
2907
+ },
2908
+ {
2909
+ "epoch": 1.08,
2910
+ "learning_rate": 8.916481312807606e-05,
2911
+ "loss": 0.9051,
2912
+ "step": 484
2913
+ },
2914
+ {
2915
+ "epoch": 1.08,
2916
+ "learning_rate": 8.90388859047979e-05,
2917
+ "loss": 0.8458,
2918
+ "step": 485
2919
+ },
2920
+ {
2921
+ "epoch": 1.08,
2922
+ "learning_rate": 8.891279141839948e-05,
2923
+ "loss": 0.8767,
2924
+ "step": 486
2925
+ },
2926
+ {
2927
+ "epoch": 1.08,
2928
+ "learning_rate": 8.878653039518131e-05,
2929
+ "loss": 0.878,
2930
+ "step": 487
2931
+ },
2932
+ {
2933
+ "epoch": 1.08,
2934
+ "learning_rate": 8.866010356240313e-05,
2935
+ "loss": 0.9164,
2936
+ "step": 488
2937
+ },
2938
+ {
2939
+ "epoch": 1.09,
2940
+ "learning_rate": 8.853351164827973e-05,
2941
+ "loss": 0.8943,
2942
+ "step": 489
2943
+ },
2944
+ {
2945
+ "epoch": 1.09,
2946
+ "learning_rate": 8.840675538197676e-05,
2947
+ "loss": 0.8845,
2948
+ "step": 490
2949
+ },
2950
+ {
2951
+ "epoch": 1.09,
2952
+ "learning_rate": 8.827983549360659e-05,
2953
+ "loss": 0.8685,
2954
+ "step": 491
2955
+ },
2956
+ {
2957
+ "epoch": 1.09,
2958
+ "learning_rate": 8.815275271422398e-05,
2959
+ "loss": 0.898,
2960
+ "step": 492
2961
+ },
2962
+ {
2963
+ "epoch": 1.1,
2964
+ "learning_rate": 8.802550777582197e-05,
2965
+ "loss": 0.8824,
2966
+ "step": 493
2967
+ },
2968
+ {
2969
+ "epoch": 1.1,
2970
+ "learning_rate": 8.789810141132762e-05,
2971
+ "loss": 0.9068,
2972
+ "step": 494
2973
+ },
2974
+ {
2975
+ "epoch": 1.1,
2976
+ "learning_rate": 8.777053435459781e-05,
2977
+ "loss": 0.9178,
2978
+ "step": 495
2979
+ },
2980
+ {
2981
+ "epoch": 1.1,
2982
+ "learning_rate": 8.7642807340415e-05,
2983
+ "loss": 0.886,
2984
+ "step": 496
2985
+ },
2986
+ {
2987
+ "epoch": 1.1,
2988
+ "learning_rate": 8.7514921104483e-05,
2989
+ "loss": 0.8724,
2990
+ "step": 497
2991
+ },
2992
+ {
2993
+ "epoch": 1.11,
2994
+ "learning_rate": 8.738687638342273e-05,
2995
+ "loss": 0.8978,
2996
+ "step": 498
2997
+ },
2998
+ {
2999
+ "epoch": 1.11,
3000
+ "learning_rate": 8.725867391476798e-05,
3001
+ "loss": 0.9092,
3002
+ "step": 499
3003
+ },
3004
+ {
3005
+ "epoch": 1.11,
3006
+ "learning_rate": 8.713031443696114e-05,
3007
+ "loss": 0.9074,
3008
+ "step": 500
3009
+ }
3010
+ ],
3011
+ "max_steps": 1350,
3012
+ "num_train_epochs": 3,
3013
+ "total_flos": 4.9660776799430246e+17,
3014
+ "trial_name": null,
3015
+ "trial_params": null
3016
+ }
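
The records above are machine-logged training metrics; the file's closing keys (max_steps, num_train_epochs, total_flos, trial_name, trial_params) match the trainer_state.json format the Hugging Face Trainer writes alongside its checkpoints. As a minimal sketch, assuming this file is downloaded locally as trainer_state.json (a hypothetical path), the logged loss curve can be inspected like so:

import json
import matplotlib.pyplot as plt

# Assumption: the JSON diffed above is a standard trainer_state.json,
# whose "log_history" array holds the epoch/learning_rate/loss/step
# records shown in this commit.
with open("trainer_state.json") as f:  # hypothetical local path
    state = json.load(f)

logged = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in logged]
losses = [e["loss"] for e in logged]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.show()

Plotted this way, the drop from losses around 1.2 to around 0.9 at step 451 lines up with the epoch-boundary entries logged above.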
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ec794cf2a744aebfddf8da932b53a867de494475dc2d023514e32a174e3f5b0b
3
+ size 4091
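
In Trainer output directories, training_args.bin is a pickled TrainingArguments object rather than a tensor file, which is why it is only a few kilobytes. A minimal sketch for inspecting it, assuming a local download (the weights_only flag is an assumption for recent PyTorch versions, which otherwise refuse to unpickle arbitrary Python objects; older versions accept a plain torch.load):

import torch

# Assumption: training_args.bin is the pickled TrainingArguments object
# the Hugging Face Trainer saves next to its checkpoints.
args = torch.load("training_args.bin", weights_only=False)
print(args)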