sedrickkeh committed
Commit 2a57002 (verified)
Parent: 05bb086

Training in progress, epoch 0

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+{
+  "</tool_call>": 151658,
+  "<tool_call>": 151657,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
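
These 22 added tokens occupy the contiguous id block 151643-151664, just past the base BPE vocabulary and inside the padded vocab_size of 152064 declared in config.json below. A minimal sketch (standard library only, assuming added_tokens.json has been downloaded to the current directory) to check that:

# Minimal sketch: confirm the added-token ids form the contiguous block 151643-151664.
import json

with open("added_tokens.json") as f:  # assumed local copy of the file above
    added = json.load(f)

ids = sorted(added.values())
assert ids == list(range(151643, 151665))
print(f"{len(ids)} added tokens, ids {ids[0]}..{ids[-1]}")
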
config.json ADDED
@@ -0,0 +1,29 @@
+{
+  "_name_or_path": "Qwen/Qwen2.5-7B-Instruct",
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 3584,
+  "initializer_range": 0.02,
+  "intermediate_size": 18944,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 28,
+  "model_type": "qwen2",
+  "num_attention_heads": 28,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 4,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.46.1",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "vocab_size": 152064
+}
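
This is a standard Qwen2.5-7B-Instruct architecture config (28 layers, hidden size 3584, grouped-query attention with 4 KV heads). A minimal sketch of inspecting it with the transformers library; the call is standard, but "./checkpoint" is a hypothetical local directory holding this commit's files:

# Minimal sketch: load and inspect the config above with transformers.
# "./checkpoint" is a hypothetical local copy of this repository.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./checkpoint")
print(config.model_type, config.num_hidden_layers, config.hidden_size)  # qwen2 28 3584
# bf16 weights use 2 bytes per parameter, so the index's total_size below
# (15231233024 bytes) corresponds to roughly 7.6B parameters.
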
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:87d0a79977342628e08005eb498c2036831df7cefd81c82e88a37c085e484e39
+size 4877660776
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fb751c9f6a28686ac877c582ea21290eaffa5a543b6f5472c9107d8e24c7e791
+size 4932751008
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7b87deb1b846b785e4df513b7d0da504b167c28bfdf004a30938e9398f52864e
+size 4330865200
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:00566972bcc9483327c83e76ce4250ab1b25fec34b31986562bd103685b0300e
+size 1089994880
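
Each .safetensors entry above is a Git LFS pointer (spec version, sha256 oid, byte size), not the weights themselves. A minimal sketch for verifying a downloaded shard against its pointer; the filename and values come from this commit, but the local path is assumed:

# Minimal sketch: check a downloaded shard against its LFS pointer (oid + size).
# Assumes the shard has been fetched to the current directory.
import hashlib, os

path = "model-00001-of-00004.safetensors"
expected_oid = "87d0a79977342628e08005eb498c2036831df7cefd81c82e88a37c085e484e39"
expected_size = 4877660776

assert os.path.getsize(path) == expected_size
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == expected_oid
print("shard matches its LFS pointer")
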
model.safetensors.index.json ADDED
@@ -0,0 +1,346 @@
+{
+  "metadata": {
+    "total_size": 15231233024
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00004-of-00004.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.norm.weight": "model-00003-of-00004.safetensors"
+  }
+}
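
The index maps every tensor name to the shard file that stores it; total_size is the combined tensor byte count across shards. A minimal sketch (standard library only, assuming the index file sits in the current directory) for inspecting it:

# Minimal sketch: inspect model.safetensors.index.json.
import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])        # 15231233024 bytes of tensor data
print(Counter(index["weight_map"].values()))  # tensors per shard file
# Note: the four LFS pointer sizes above sum to 15231271864, slightly more than
# total_size, since each .safetensors file also carries a small JSON header.
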
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+size 11421896
tokenizer_config.json ADDED
@@ -0,0 +1,208 @@
+{
+  "add_bos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151645": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151646": {
+      "content": "<|object_ref_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151647": {
+      "content": "<|object_ref_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151648": {
+      "content": "<|box_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151649": {
+      "content": "<|box_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151650": {
+      "content": "<|quad_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151651": {
+      "content": "<|quad_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151652": {
+      "content": "<|vision_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151653": {
+      "content": "<|vision_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151654": {
+      "content": "<|vision_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151655": {
+      "content": "<|image_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151656": {
+      "content": "<|video_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151657": {
+      "content": "<tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151658": {
+      "content": "</tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151659": {
+      "content": "<|fim_prefix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151660": {
+      "content": "<|fim_middle|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151661": {
+      "content": "<|fim_suffix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151662": {
+      "content": "<|fim_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151663": {
+      "content": "<|repo_name|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151664": {
+      "content": "<|file_sep|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "bos_token": null,
+  "chat_template": "{%- if tools %}\n    {{- '<|im_start|>system\\n' }}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- messages[0]['content'] }}\n    {%- else %}\n        {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n    {%- endif %}\n    {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n    {%- for tool in tools %}\n        {{- \"\\n\" }}\n        {{- tool | tojson }}\n    {%- endfor %}\n    {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n    {%- else %}\n        {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n    {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n    {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n        {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n    {%- elif message.role == \"assistant\" %}\n        {{- '<|im_start|>' + message.role }}\n        {%- if message.content %}\n            {{- '\\n' + message.content }}\n        {%- endif %}\n        {%- for tool_call in message.tool_calls %}\n            {%- if tool_call.function is defined %}\n                {%- set tool_call = tool_call.function %}\n            {%- endif %}\n            {{- '\\n<tool_call>\\n{\"name\": \"' }}\n            {{- tool_call.name }}\n            {{- '\", \"arguments\": ' }}\n            {{- tool_call.arguments | tojson }}\n            {{- '}\\n</tool_call>' }}\n        {%- endfor %}\n        {{- '<|im_end|>\\n' }}\n    {%- elif message.role == \"tool\" %}\n        {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n            {{- '<|im_start|>user' }}\n        {%- endif %}\n        {{- '\\n<tool_response>\\n' }}\n        {{- message.content }}\n        {{- '\\n</tool_response>' }}\n        {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n            {{- '<|im_end|>\\n' }}\n        {%- endif %}\n    {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n    {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|endoftext|>",
+  "errors": "replace",
+  "model_max_length": 131072,
+  "pad_token": "<|endoftext|>",
+  "padding_side": "right",
+  "split_special_tokens": false,
+  "tokenizer_class": "Qwen2Tokenizer",
+  "unk_token": null
+}
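
The chat_template above is Qwen2.5's ChatML-style Jinja template, including the tool-calling branches. A minimal sketch of rendering it with transformers; the call is standard, but "./checkpoint" is a hypothetical local directory holding this commit's tokenizer files:

# Minimal sketch: render the chat template shipped in tokenizer_config.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # hypothetical local path
messages = [{"role": "user", "content": "Hello!"}]
text = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(text)
# Expected shape (per the template): a default Qwen system turn, the user turn,
# then "<|im_start|>assistant\n" as the generation prompt.
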
trainer_log.jsonl ADDED
@@ -0,0 +1,366 @@
+{"current_steps": 1, "total_steps": 1815, "loss": 1.1009, "lr": 4.395604395604396e-07, "epoch": 0.0027491408934707906, "percentage": 0.06, "elapsed_time": "0:01:33", "remaining_time": "1 day, 23:08:05"}
+{"current_steps": 2, "total_steps": 1815, "loss": 1.1005, "lr": 8.791208791208792e-07, "epoch": 0.005498281786941581, "percentage": 0.11, "elapsed_time": "0:02:23", "remaining_time": "1 day, 12:11:49"}
+{"current_steps": 3, "total_steps": 1815, "loss": 1.1055, "lr": 1.3186813186813187e-06, "epoch": 0.008247422680412371, "percentage": 0.17, "elapsed_time": "0:03:14", "remaining_time": "1 day, 8:35:33"}
+{"current_steps": 4, "total_steps": 1815, "loss": 1.1099, "lr": 1.7582417582417585e-06, "epoch": 0.010996563573883162, "percentage": 0.22, "elapsed_time": "0:04:05", "remaining_time": "1 day, 6:50:58"}
+{"current_steps": 5, "total_steps": 1815, "loss": 1.081, "lr": 2.197802197802198e-06, "epoch": 0.013745704467353952, "percentage": 0.28, "elapsed_time": "0:04:56", "remaining_time": "1 day, 5:46:37"}
+{"current_steps": 6, "total_steps": 1815, "loss": 1.0487, "lr": 2.6373626373626375e-06, "epoch": 0.016494845360824743, "percentage": 0.33, "elapsed_time": "0:05:46", "remaining_time": "1 day, 5:01:32"}
+{"current_steps": 7, "total_steps": 1815, "loss": 1.0556, "lr": 3.0769230769230774e-06, "epoch": 0.019243986254295534, "percentage": 0.39, "elapsed_time": "0:06:37", "remaining_time": "1 day, 4:29:45"}
+{"current_steps": 8, "total_steps": 1815, "loss": 1.0049, "lr": 3.516483516483517e-06, "epoch": 0.021993127147766325, "percentage": 0.44, "elapsed_time": "0:07:27", "remaining_time": "1 day, 4:03:36"}
+{"current_steps": 9, "total_steps": 1815, "loss": 0.9803, "lr": 3.9560439560439565e-06, "epoch": 0.024742268041237112, "percentage": 0.5, "elapsed_time": "0:08:17", "remaining_time": "1 day, 3:43:44"}
+{"current_steps": 10, "total_steps": 1815, "loss": 0.9589, "lr": 4.395604395604396e-06, "epoch": 0.027491408934707903, "percentage": 0.55, "elapsed_time": "0:09:07", "remaining_time": "1 day, 3:27:15"}
+{"current_steps": 11, "total_steps": 1815, "loss": 0.9677, "lr": 4.8351648351648355e-06, "epoch": 0.030240549828178694, "percentage": 0.61, "elapsed_time": "0:09:57", "remaining_time": "1 day, 3:14:14"}
+{"current_steps": 12, "total_steps": 1815, "loss": 0.96, "lr": 5.274725274725275e-06, "epoch": 0.032989690721649485, "percentage": 0.66, "elapsed_time": "0:10:47", "remaining_time": "1 day, 3:02:37"}
+{"current_steps": 13, "total_steps": 1815, "loss": 0.9481, "lr": 5.7142857142857145e-06, "epoch": 0.03573883161512027, "percentage": 0.72, "elapsed_time": "0:11:44", "remaining_time": "1 day, 3:07:58"}
+{"current_steps": 14, "total_steps": 1815, "loss": 0.9057, "lr": 6.153846153846155e-06, "epoch": 0.03848797250859107, "percentage": 0.77, "elapsed_time": "0:12:34", "remaining_time": "1 day, 2:58:12"}
+{"current_steps": 15, "total_steps": 1815, "loss": 0.9048, "lr": 6.5934065934065935e-06, "epoch": 0.041237113402061855, "percentage": 0.83, "elapsed_time": "0:13:24", "remaining_time": "1 day, 2:49:40"}
+{"current_steps": 16, "total_steps": 1815, "loss": 0.8869, "lr": 7.032967032967034e-06, "epoch": 0.04398625429553265, "percentage": 0.88, "elapsed_time": "0:14:15", "remaining_time": "1 day, 2:42:52"}
+{"current_steps": 17, "total_steps": 1815, "loss": 0.8503, "lr": 7.472527472527473e-06, "epoch": 0.04673539518900344, "percentage": 0.94, "elapsed_time": "0:15:05", "remaining_time": "1 day, 2:36:11"}
+{"current_steps": 18, "total_steps": 1815, "loss": 0.8405, "lr": 7.912087912087913e-06, "epoch": 0.049484536082474224, "percentage": 0.99, "elapsed_time": "0:15:55", "remaining_time": "1 day, 2:29:56"}
+{"current_steps": 19, "total_steps": 1815, "loss": 0.8274, "lr": 8.351648351648353e-06, "epoch": 0.05223367697594502, "percentage": 1.05, "elapsed_time": "0:16:45", "remaining_time": "1 day, 2:24:20"}
+{"current_steps": 20, "total_steps": 1815, "loss": 0.8428, "lr": 8.791208791208792e-06, "epoch": 0.054982817869415807, "percentage": 1.1, "elapsed_time": "0:17:35", "remaining_time": "1 day, 2:19:05"}
+{"current_steps": 21, "total_steps": 1815, "loss": 0.8211, "lr": 9.230769230769232e-06, "epoch": 0.0577319587628866, "percentage": 1.16, "elapsed_time": "0:18:25", "remaining_time": "1 day, 2:14:23"}
+{"current_steps": 22, "total_steps": 1815, "loss": 0.8053, "lr": 9.670329670329671e-06, "epoch": 0.06048109965635739, "percentage": 1.21, "elapsed_time": "0:19:15", "remaining_time": "1 day, 2:10:11"}
+{"current_steps": 23, "total_steps": 1815, "loss": 0.8101, "lr": 1.010989010989011e-05, "epoch": 0.06323024054982818, "percentage": 1.27, "elapsed_time": "0:20:06", "remaining_time": "1 day, 2:06:11"}
+{"current_steps": 24, "total_steps": 1815, "loss": 0.7837, "lr": 1.054945054945055e-05, "epoch": 0.06597938144329897, "percentage": 1.32, "elapsed_time": "0:20:56", "remaining_time": "1 day, 2:02:56"}
+{"current_steps": 25, "total_steps": 1815, "loss": 0.79, "lr": 1.098901098901099e-05, "epoch": 0.06872852233676977, "percentage": 1.38, "elapsed_time": "0:21:46", "remaining_time": "1 day, 1:59:21"}
+{"current_steps": 26, "total_steps": 1815, "loss": 0.7927, "lr": 1.1428571428571429e-05, "epoch": 0.07147766323024055, "percentage": 1.43, "elapsed_time": "0:22:36", "remaining_time": "1 day, 1:55:52"}
+{"current_steps": 27, "total_steps": 1815, "loss": 0.7826, "lr": 1.186813186813187e-05, "epoch": 0.07422680412371134, "percentage": 1.49, "elapsed_time": "0:23:27", "remaining_time": "1 day, 1:53:27"}
+{"current_steps": 28, "total_steps": 1815, "loss": 0.77, "lr": 1.230769230769231e-05, "epoch": 0.07697594501718213, "percentage": 1.54, "elapsed_time": "0:24:24", "remaining_time": "1 day, 1:57:47"}
+{"current_steps": 29, "total_steps": 1815, "loss": 0.764, "lr": 1.2747252747252747e-05, "epoch": 0.07972508591065292, "percentage": 1.6, "elapsed_time": "0:25:14", "remaining_time": "1 day, 1:54:40"}
+{"current_steps": 30, "total_steps": 1815, "loss": 0.7667, "lr": 1.3186813186813187e-05, "epoch": 0.08247422680412371, "percentage": 1.65, "elapsed_time": "0:26:05", "remaining_time": "1 day, 1:52:09"}
+{"current_steps": 31, "total_steps": 1815, "loss": 0.7669, "lr": 1.3626373626373627e-05, "epoch": 0.0852233676975945, "percentage": 1.71, "elapsed_time": "0:26:55", "remaining_time": "1 day, 1:49:20"}
+{"current_steps": 32, "total_steps": 1815, "loss": 0.7617, "lr": 1.4065934065934068e-05, "epoch": 0.0879725085910653, "percentage": 1.76, "elapsed_time": "0:27:45", "remaining_time": "1 day, 1:46:34"}
+{"current_steps": 33, "total_steps": 1815, "loss": 0.7554, "lr": 1.4505494505494506e-05, "epoch": 0.09072164948453608, "percentage": 1.82, "elapsed_time": "0:28:35", "remaining_time": "1 day, 1:44:01"}
+{"current_steps": 34, "total_steps": 1815, "loss": 0.75, "lr": 1.4945054945054947e-05, "epoch": 0.09347079037800687, "percentage": 1.87, "elapsed_time": "0:29:25", "remaining_time": "1 day, 1:41:24"}
+{"current_steps": 35, "total_steps": 1815, "loss": 0.7467, "lr": 1.5384615384615387e-05, "epoch": 0.09621993127147767, "percentage": 1.93, "elapsed_time": "0:30:15", "remaining_time": "1 day, 1:38:54"}
+{"current_steps": 36, "total_steps": 1815, "loss": 0.7383, "lr": 1.5824175824175826e-05, "epoch": 0.09896907216494845, "percentage": 1.98, "elapsed_time": "0:31:05", "remaining_time": "1 day, 1:36:31"}
+{"current_steps": 37, "total_steps": 1815, "loss": 0.7524, "lr": 1.6263736263736265e-05, "epoch": 0.10171821305841924, "percentage": 2.04, "elapsed_time": "0:31:55", "remaining_time": "1 day, 1:34:18"}
+{"current_steps": 38, "total_steps": 1815, "loss": 0.7371, "lr": 1.6703296703296707e-05, "epoch": 0.10446735395189004, "percentage": 2.09, "elapsed_time": "0:32:45", "remaining_time": "1 day, 1:32:09"}
+{"current_steps": 39, "total_steps": 1815, "loss": 0.7332, "lr": 1.7142857142857142e-05, "epoch": 0.10721649484536082, "percentage": 2.15, "elapsed_time": "0:33:35", "remaining_time": "1 day, 1:30:03"}
+{"current_steps": 40, "total_steps": 1815, "loss": 0.7301, "lr": 1.7582417582417584e-05, "epoch": 0.10996563573883161, "percentage": 2.2, "elapsed_time": "0:34:25", "remaining_time": "1 day, 1:27:55"}
+{"current_steps": 41, "total_steps": 1815, "loss": 0.7481, "lr": 1.8021978021978023e-05, "epoch": 0.11271477663230241, "percentage": 2.26, "elapsed_time": "0:35:15", "remaining_time": "1 day, 1:25:54"}
+{"current_steps": 42, "total_steps": 1815, "loss": 0.7234, "lr": 1.8461538461538465e-05, "epoch": 0.1154639175257732, "percentage": 2.31, "elapsed_time": "0:36:07", "remaining_time": "1 day, 1:25:03"}
+{"current_steps": 43, "total_steps": 1815, "loss": 0.7382, "lr": 1.8901098901098903e-05, "epoch": 0.11821305841924398, "percentage": 2.37, "elapsed_time": "0:37:05", "remaining_time": "1 day, 1:28:22"}
+{"current_steps": 44, "total_steps": 1815, "loss": 0.7274, "lr": 1.9340659340659342e-05, "epoch": 0.12096219931271478, "percentage": 2.42, "elapsed_time": "0:37:55", "remaining_time": "1 day, 1:26:34"}
+{"current_steps": 45, "total_steps": 1815, "loss": 0.7222, "lr": 1.9780219780219784e-05, "epoch": 0.12371134020618557, "percentage": 2.48, "elapsed_time": "0:38:45", "remaining_time": "1 day, 1:24:41"}
+{"current_steps": 46, "total_steps": 1815, "loss": 0.7062, "lr": 2.021978021978022e-05, "epoch": 0.12646048109965635, "percentage": 2.53, "elapsed_time": "0:39:35", "remaining_time": "1 day, 1:22:50"}
+{"current_steps": 47, "total_steps": 1815, "loss": 0.7273, "lr": 2.0659340659340665e-05, "epoch": 0.12920962199312716, "percentage": 2.59, "elapsed_time": "0:40:26", "remaining_time": "1 day, 1:21:01"}
+{"current_steps": 48, "total_steps": 1815, "loss": 0.7144, "lr": 2.10989010989011e-05, "epoch": 0.13195876288659794, "percentage": 2.64, "elapsed_time": "0:41:16", "remaining_time": "1 day, 1:19:18"}
+{"current_steps": 49, "total_steps": 1815, "loss": 0.7284, "lr": 2.153846153846154e-05, "epoch": 0.13470790378006872, "percentage": 2.7, "elapsed_time": "0:42:06", "remaining_time": "1 day, 1:17:37"}
+{"current_steps": 50, "total_steps": 1815, "loss": 0.7197, "lr": 2.197802197802198e-05, "epoch": 0.13745704467353953, "percentage": 2.75, "elapsed_time": "0:42:56", "remaining_time": "1 day, 1:15:59"}
+{"current_steps": 51, "total_steps": 1815, "loss": 0.7168, "lr": 2.241758241758242e-05, "epoch": 0.1402061855670103, "percentage": 2.81, "elapsed_time": "0:43:46", "remaining_time": "1 day, 1:14:22"}
+{"current_steps": 52, "total_steps": 1815, "loss": 0.7047, "lr": 2.2857142857142858e-05, "epoch": 0.1429553264604811, "percentage": 2.87, "elapsed_time": "0:44:37", "remaining_time": "1 day, 1:12:41"}
+{"current_steps": 53, "total_steps": 1815, "loss": 0.7203, "lr": 2.32967032967033e-05, "epoch": 0.1457044673539519, "percentage": 2.92, "elapsed_time": "0:45:27", "remaining_time": "1 day, 1:11:04"}
+{"current_steps": 54, "total_steps": 1815, "loss": 0.7132, "lr": 2.373626373626374e-05, "epoch": 0.14845360824742268, "percentage": 2.98, "elapsed_time": "0:46:17", "remaining_time": "1 day, 1:09:39"}
+{"current_steps": 55, "total_steps": 1815, "loss": 0.7139, "lr": 2.4175824175824177e-05, "epoch": 0.15120274914089346, "percentage": 3.03, "elapsed_time": "0:47:07", "remaining_time": "1 day, 1:08:08"}
+{"current_steps": 56, "total_steps": 1815, "loss": 0.7201, "lr": 2.461538461538462e-05, "epoch": 0.15395189003436427, "percentage": 3.09, "elapsed_time": "0:47:57", "remaining_time": "1 day, 1:06:37"}
+{"current_steps": 57, "total_steps": 1815, "loss": 0.6997, "lr": 2.5054945054945058e-05, "epoch": 0.15670103092783505, "percentage": 3.14, "elapsed_time": "0:48:50", "remaining_time": "1 day, 1:06:19"}
+{"current_steps": 58, "total_steps": 1815, "loss": 0.708, "lr": 2.5494505494505493e-05, "epoch": 0.15945017182130583, "percentage": 3.2, "elapsed_time": "0:49:49", "remaining_time": "1 day, 1:09:21"}
+{"current_steps": 59, "total_steps": 1815, "loss": 0.7023, "lr": 2.593406593406594e-05, "epoch": 0.16219931271477664, "percentage": 3.25, "elapsed_time": "0:50:39", "remaining_time": "1 day, 1:07:42"}
+{"current_steps": 60, "total_steps": 1815, "loss": 0.699, "lr": 2.6373626373626374e-05, "epoch": 0.16494845360824742, "percentage": 3.31, "elapsed_time": "0:51:29", "remaining_time": "1 day, 1:06:11"}
+{"current_steps": 61, "total_steps": 1815, "loss": 0.6944, "lr": 2.6813186813186813e-05, "epoch": 0.1676975945017182, "percentage": 3.36, "elapsed_time": "0:52:19", "remaining_time": "1 day, 1:04:40"}
+{"current_steps": 62, "total_steps": 1815, "loss": 0.6897, "lr": 2.7252747252747255e-05, "epoch": 0.170446735395189, "percentage": 3.42, "elapsed_time": "0:53:10", "remaining_time": "1 day, 1:03:27"}
+{"current_steps": 63, "total_steps": 1815, "loss": 0.6937, "lr": 2.7692307692307694e-05, "epoch": 0.1731958762886598, "percentage": 3.47, "elapsed_time": "0:54:00", "remaining_time": "1 day, 1:02:00"}
+{"current_steps": 64, "total_steps": 1815, "loss": 0.6924, "lr": 2.8131868131868136e-05, "epoch": 0.1759450171821306, "percentage": 3.53, "elapsed_time": "0:54:50", "remaining_time": "1 day, 1:00:35"}
+{"current_steps": 65, "total_steps": 1815, "loss": 0.7036, "lr": 2.8571428571428574e-05, "epoch": 0.17869415807560138, "percentage": 3.58, "elapsed_time": "0:55:40", "remaining_time": "1 day, 0:59:09"}
+{"current_steps": 66, "total_steps": 1815, "loss": 0.7067, "lr": 2.9010989010989013e-05, "epoch": 0.18144329896907216, "percentage": 3.64, "elapsed_time": "0:56:31", "remaining_time": "1 day, 0:57:44"}
+{"current_steps": 67, "total_steps": 1815, "loss": 0.6846, "lr": 2.9450549450549455e-05, "epoch": 0.18419243986254297, "percentage": 3.69, "elapsed_time": "0:57:21", "remaining_time": "1 day, 0:56:22"}
+{"current_steps": 68, "total_steps": 1815, "loss": 0.7001, "lr": 2.9890109890109894e-05, "epoch": 0.18694158075601375, "percentage": 3.75, "elapsed_time": "0:58:11", "remaining_time": "1 day, 0:54:57"}
+{"current_steps": 69, "total_steps": 1815, "loss": 0.7017, "lr": 3.0329670329670332e-05, "epoch": 0.18969072164948453, "percentage": 3.8, "elapsed_time": "0:59:01", "remaining_time": "1 day, 0:53:36"}
+{"current_steps": 70, "total_steps": 1815, "loss": 0.6945, "lr": 3.0769230769230774e-05, "epoch": 0.19243986254295534, "percentage": 3.86, "elapsed_time": "0:59:51", "remaining_time": "1 day, 0:52:16"}
+{"current_steps": 71, "total_steps": 1815, "loss": 0.6983, "lr": 3.120879120879121e-05, "epoch": 0.19518900343642612, "percentage": 3.91, "elapsed_time": "1:00:41", "remaining_time": "1 day, 0:50:52"}
+{"current_steps": 72, "total_steps": 1815, "loss": 0.6944, "lr": 3.164835164835165e-05, "epoch": 0.1979381443298969, "percentage": 3.97, "elapsed_time": "1:01:34", "remaining_time": "1 day, 0:50:45"}
+{"current_steps": 73, "total_steps": 1815, "loss": 0.6797, "lr": 3.2087912087912094e-05, "epoch": 0.2006872852233677, "percentage": 4.02, "elapsed_time": "1:02:33", "remaining_time": "1 day, 0:52:51"}
+{"current_steps": 74, "total_steps": 1815, "loss": 0.6916, "lr": 3.252747252747253e-05, "epoch": 0.2034364261168385, "percentage": 4.08, "elapsed_time": "1:03:23", "remaining_time": "1 day, 0:51:27"}
+{"current_steps": 75, "total_steps": 1815, "loss": 0.696, "lr": 3.296703296703297e-05, "epoch": 0.20618556701030927, "percentage": 4.13, "elapsed_time": "1:04:13", "remaining_time": "1 day, 0:50:10"}
+{"current_steps": 76, "total_steps": 1815, "loss": 0.6946, "lr": 3.340659340659341e-05, "epoch": 0.20893470790378008, "percentage": 4.19, "elapsed_time": "1:05:03", "remaining_time": "1 day, 0:48:47"}
+{"current_steps": 77, "total_steps": 1815, "loss": 0.6938, "lr": 3.384615384615385e-05, "epoch": 0.21168384879725086, "percentage": 4.24, "elapsed_time": "1:05:54", "remaining_time": "1 day, 0:47:29"}
+{"current_steps": 78, "total_steps": 1815, "loss": 0.708, "lr": 3.4285714285714284e-05, "epoch": 0.21443298969072164, "percentage": 4.3, "elapsed_time": "1:06:44", "remaining_time": "1 day, 0:46:13"}
+{"current_steps": 79, "total_steps": 1815, "loss": 0.7043, "lr": 3.4725274725274726e-05, "epoch": 0.21718213058419245, "percentage": 4.35, "elapsed_time": "1:07:34", "remaining_time": "1 day, 0:44:57"}
+{"current_steps": 80, "total_steps": 1815, "loss": 0.7015, "lr": 3.516483516483517e-05, "epoch": 0.21993127147766323, "percentage": 4.41, "elapsed_time": "1:08:24", "remaining_time": "1 day, 0:43:43"}
+{"current_steps": 81, "total_steps": 1815, "loss": 0.6934, "lr": 3.56043956043956e-05, "epoch": 0.22268041237113403, "percentage": 4.46, "elapsed_time": "1:09:14", "remaining_time": "1 day, 0:42:26"}
+{"current_steps": 82, "total_steps": 1815, "loss": 0.6923, "lr": 3.6043956043956045e-05, "epoch": 0.22542955326460482, "percentage": 4.52, "elapsed_time": "1:10:04", "remaining_time": "1 day, 0:41:07"}
+{"current_steps": 83, "total_steps": 1815, "loss": 0.6932, "lr": 3.648351648351649e-05, "epoch": 0.2281786941580756, "percentage": 4.57, "elapsed_time": "1:10:55", "remaining_time": "1 day, 0:39:52"}
+{"current_steps": 84, "total_steps": 1815, "loss": 0.6837, "lr": 3.692307692307693e-05, "epoch": 0.2309278350515464, "percentage": 4.63, "elapsed_time": "1:11:45", "remaining_time": "1 day, 0:38:37"}
+{"current_steps": 85, "total_steps": 1815, "loss": 0.6824, "lr": 3.7362637362637365e-05, "epoch": 0.23367697594501718, "percentage": 4.68, "elapsed_time": "1:12:35", "remaining_time": "1 day, 0:37:27"}
+{"current_steps": 86, "total_steps": 1815, "loss": 0.6818, "lr": 3.7802197802197807e-05, "epoch": 0.23642611683848797, "percentage": 4.74, "elapsed_time": "1:13:25", "remaining_time": "1 day, 0:36:18"}
+{"current_steps": 87, "total_steps": 1815, "loss": 0.6881, "lr": 3.824175824175825e-05, "epoch": 0.23917525773195877, "percentage": 4.79, "elapsed_time": "1:14:19", "remaining_time": "1 day, 0:36:07"}
+{"current_steps": 88, "total_steps": 1815, "loss": 0.694, "lr": 3.8681318681318684e-05, "epoch": 0.24192439862542955, "percentage": 4.85, "elapsed_time": "1:15:18", "remaining_time": "1 day, 0:37:53"}
+{"current_steps": 89, "total_steps": 1815, "loss": 0.682, "lr": 3.9120879120879126e-05, "epoch": 0.24467353951890033, "percentage": 4.9, "elapsed_time": "1:16:08", "remaining_time": "1 day, 0:36:39"}
+{"current_steps": 90, "total_steps": 1815, "loss": 0.6851, "lr": 3.956043956043957e-05, "epoch": 0.24742268041237114, "percentage": 4.96, "elapsed_time": "1:16:58", "remaining_time": "1 day, 0:35:23"}
+{"current_steps": 91, "total_steps": 1815, "loss": 0.6807, "lr": 4e-05, "epoch": 0.2501718213058419, "percentage": 5.01, "elapsed_time": "1:17:48", "remaining_time": "1 day, 0:34:10"}
+{"current_steps": 92, "total_steps": 1815, "loss": 0.6782, "lr": 4.043956043956044e-05, "epoch": 0.2529209621993127, "percentage": 5.07, "elapsed_time": "1:18:38", "remaining_time": "1 day, 0:32:53"}
+{"current_steps": 93, "total_steps": 1815, "loss": 0.6759, "lr": 4.087912087912088e-05, "epoch": 0.2556701030927835, "percentage": 5.12, "elapsed_time": "1:19:28", "remaining_time": "1 day, 0:31:39"}
94
+ {"current_steps": 94, "total_steps": 1815, "loss": 0.6873, "lr": 4.131868131868133e-05, "epoch": 0.2584192439862543, "percentage": 5.18, "elapsed_time": "1:20:18", "remaining_time": "1 day, 0:30:26"}
95
+ {"current_steps": 95, "total_steps": 1815, "loss": 0.6814, "lr": 4.1758241758241765e-05, "epoch": 0.2611683848797251, "percentage": 5.23, "elapsed_time": "1:21:08", "remaining_time": "1 day, 0:29:14"}
96
+ {"current_steps": 96, "total_steps": 1815, "loss": 0.682, "lr": 4.21978021978022e-05, "epoch": 0.2639175257731959, "percentage": 5.29, "elapsed_time": "1:21:59", "remaining_time": "1 day, 0:28:04"}
97
+ {"current_steps": 97, "total_steps": 1815, "loss": 0.6828, "lr": 4.263736263736264e-05, "epoch": 0.26666666666666666, "percentage": 5.34, "elapsed_time": "1:22:49", "remaining_time": "1 day, 0:26:52"}
98
+ {"current_steps": 98, "total_steps": 1815, "loss": 0.687, "lr": 4.307692307692308e-05, "epoch": 0.26941580756013744, "percentage": 5.4, "elapsed_time": "1:23:39", "remaining_time": "1 day, 0:25:38"}
99
+ {"current_steps": 99, "total_steps": 1815, "loss": 0.6759, "lr": 4.351648351648352e-05, "epoch": 0.2721649484536082, "percentage": 5.45, "elapsed_time": "1:24:29", "remaining_time": "1 day, 0:24:28"}
100
+ {"current_steps": 100, "total_steps": 1815, "loss": 0.6743, "lr": 4.395604395604396e-05, "epoch": 0.27491408934707906, "percentage": 5.51, "elapsed_time": "1:25:19", "remaining_time": "1 day, 0:23:16"}
101
+ {"current_steps": 101, "total_steps": 1815, "loss": 0.6684, "lr": 4.4395604395604403e-05, "epoch": 0.27766323024054984, "percentage": 5.56, "elapsed_time": "1:26:09", "remaining_time": "1 day, 0:22:16"}
102
+ {"current_steps": 102, "total_steps": 1815, "loss": 0.6643, "lr": 4.483516483516484e-05, "epoch": 0.2804123711340206, "percentage": 5.62, "elapsed_time": "1:27:03", "remaining_time": "1 day, 0:22:06"}
103
+ {"current_steps": 103, "total_steps": 1815, "loss": 0.6878, "lr": 4.527472527472528e-05, "epoch": 0.2831615120274914, "percentage": 5.67, "elapsed_time": "1:28:03", "remaining_time": "1 day, 0:23:38"}
104
+ {"current_steps": 104, "total_steps": 1815, "loss": 0.6889, "lr": 4.5714285714285716e-05, "epoch": 0.2859106529209622, "percentage": 5.73, "elapsed_time": "1:28:53", "remaining_time": "1 day, 0:22:27"}
105
+ {"current_steps": 105, "total_steps": 1815, "loss": 0.6794, "lr": 4.615384615384615e-05, "epoch": 0.28865979381443296, "percentage": 5.79, "elapsed_time": "1:29:43", "remaining_time": "1 day, 0:21:16"}
106
+ {"current_steps": 106, "total_steps": 1815, "loss": 0.6798, "lr": 4.65934065934066e-05, "epoch": 0.2914089347079038, "percentage": 5.84, "elapsed_time": "1:30:33", "remaining_time": "1 day, 0:20:06"}
107
+ {"current_steps": 107, "total_steps": 1815, "loss": 0.6852, "lr": 4.7032967032967035e-05, "epoch": 0.2941580756013746, "percentage": 5.9, "elapsed_time": "1:31:23", "remaining_time": "1 day, 0:18:56"}
108
+ {"current_steps": 108, "total_steps": 1815, "loss": 0.6695, "lr": 4.747252747252748e-05, "epoch": 0.29690721649484536, "percentage": 5.95, "elapsed_time": "1:32:13", "remaining_time": "1 day, 0:17:47"}
109
+ {"current_steps": 109, "total_steps": 1815, "loss": 0.6842, "lr": 4.791208791208791e-05, "epoch": 0.29965635738831614, "percentage": 6.01, "elapsed_time": "1:33:04", "remaining_time": "1 day, 0:16:39"}
110
+ {"current_steps": 110, "total_steps": 1815, "loss": 0.6728, "lr": 4.8351648351648355e-05, "epoch": 0.3024054982817869, "percentage": 6.06, "elapsed_time": "1:33:54", "remaining_time": "1 day, 0:15:28"}
111
+ {"current_steps": 111, "total_steps": 1815, "loss": 0.6804, "lr": 4.87912087912088e-05, "epoch": 0.30515463917525776, "percentage": 6.12, "elapsed_time": "1:34:44", "remaining_time": "1 day, 0:14:19"}
112
+ {"current_steps": 112, "total_steps": 1815, "loss": 0.6673, "lr": 4.923076923076924e-05, "epoch": 0.30790378006872854, "percentage": 6.17, "elapsed_time": "1:35:34", "remaining_time": "1 day, 0:13:10"}
113
+ {"current_steps": 113, "total_steps": 1815, "loss": 0.6782, "lr": 4.9670329670329674e-05, "epoch": 0.3106529209621993, "percentage": 6.23, "elapsed_time": "1:36:24", "remaining_time": "1 day, 0:12:03"}
114
+ {"current_steps": 114, "total_steps": 1815, "loss": 0.6719, "lr": 5.0109890109890116e-05, "epoch": 0.3134020618556701, "percentage": 6.28, "elapsed_time": "1:37:14", "remaining_time": "1 day, 0:10:54"}
115
+ {"current_steps": 115, "total_steps": 1815, "loss": 0.6734, "lr": 5.054945054945055e-05, "epoch": 0.3161512027491409, "percentage": 6.34, "elapsed_time": "1:38:04", "remaining_time": "1 day, 0:09:45"}
116
+ {"current_steps": 116, "total_steps": 1815, "loss": 0.6689, "lr": 5.098901098901099e-05, "epoch": 0.31890034364261166, "percentage": 6.39, "elapsed_time": "1:38:54", "remaining_time": "1 day, 0:08:41"}
117
+ {"current_steps": 117, "total_steps": 1815, "loss": 0.6725, "lr": 5.1428571428571436e-05, "epoch": 0.3216494845360825, "percentage": 6.45, "elapsed_time": "1:39:48", "remaining_time": "1 day, 0:08:27"}
118
+ {"current_steps": 118, "total_steps": 1815, "loss": 0.6788, "lr": 5.186813186813188e-05, "epoch": 0.3243986254295533, "percentage": 6.5, "elapsed_time": "1:40:49", "remaining_time": "1 day, 0:10:02"}
119
+ {"current_steps": 119, "total_steps": 1815, "loss": 0.6717, "lr": 5.230769230769231e-05, "epoch": 0.32714776632302406, "percentage": 6.56, "elapsed_time": "1:41:39", "remaining_time": "1 day, 0:08:52"}
120
+ {"current_steps": 120, "total_steps": 1815, "loss": 0.6754, "lr": 5.274725274725275e-05, "epoch": 0.32989690721649484, "percentage": 6.61, "elapsed_time": "1:42:29", "remaining_time": "1 day, 0:07:43"}
121
+ {"current_steps": 121, "total_steps": 1815, "loss": 0.6689, "lr": 5.318681318681319e-05, "epoch": 0.3326460481099656, "percentage": 6.67, "elapsed_time": "1:43:19", "remaining_time": "1 day, 0:06:36"}
122
+ {"current_steps": 122, "total_steps": 1815, "loss": 0.6822, "lr": 5.3626373626373626e-05, "epoch": 0.3353951890034364, "percentage": 6.72, "elapsed_time": "1:44:09", "remaining_time": "1 day, 0:05:28"}
123
+ {"current_steps": 123, "total_steps": 1815, "loss": 0.6939, "lr": 5.4065934065934074e-05, "epoch": 0.33814432989690724, "percentage": 6.78, "elapsed_time": "1:44:59", "remaining_time": "1 day, 0:04:21"}
124
+ {"current_steps": 124, "total_steps": 1815, "loss": 0.6685, "lr": 5.450549450549451e-05, "epoch": 0.340893470790378, "percentage": 6.83, "elapsed_time": "1:45:49", "remaining_time": "1 day, 0:03:14"}
125
+ {"current_steps": 125, "total_steps": 1815, "loss": 0.6726, "lr": 5.494505494505495e-05, "epoch": 0.3436426116838488, "percentage": 6.89, "elapsed_time": "1:46:40", "remaining_time": "1 day, 0:02:09"}
126
+ {"current_steps": 126, "total_steps": 1815, "loss": 0.6609, "lr": 5.538461538461539e-05, "epoch": 0.3463917525773196, "percentage": 6.94, "elapsed_time": "1:47:30", "remaining_time": "1 day, 0:01:04"}
127
+ {"current_steps": 127, "total_steps": 1815, "loss": 0.68, "lr": 5.582417582417583e-05, "epoch": 0.34914089347079036, "percentage": 7.0, "elapsed_time": "1:48:20", "remaining_time": "1 day, 0:00:00"}
128
+ {"current_steps": 128, "total_steps": 1815, "loss": 0.6715, "lr": 5.626373626373627e-05, "epoch": 0.3518900343642612, "percentage": 7.05, "elapsed_time": "1:49:10", "remaining_time": "23:58:56"}
129
+ {"current_steps": 129, "total_steps": 1815, "loss": 0.6681, "lr": 5.670329670329671e-05, "epoch": 0.354639175257732, "percentage": 7.11, "elapsed_time": "1:50:00", "remaining_time": "23:57:51"}
130
+ {"current_steps": 130, "total_steps": 1815, "loss": 0.6818, "lr": 5.714285714285715e-05, "epoch": 0.35738831615120276, "percentage": 7.16, "elapsed_time": "1:50:50", "remaining_time": "23:56:45"}
131
+ {"current_steps": 131, "total_steps": 1815, "loss": 0.6605, "lr": 5.7582417582417584e-05, "epoch": 0.36013745704467354, "percentage": 7.22, "elapsed_time": "1:51:41", "remaining_time": "23:55:46"}
132
+ {"current_steps": 132, "total_steps": 1815, "loss": 0.6731, "lr": 5.8021978021978026e-05, "epoch": 0.3628865979381443, "percentage": 7.27, "elapsed_time": "1:52:35", "remaining_time": "23:55:38"}
133
+ {"current_steps": 133, "total_steps": 1815, "loss": 0.675, "lr": 5.846153846153846e-05, "epoch": 0.3656357388316151, "percentage": 7.33, "elapsed_time": "1:53:37", "remaining_time": "23:56:55"}
134
+ {"current_steps": 134, "total_steps": 1815, "loss": 0.661, "lr": 5.890109890109891e-05, "epoch": 0.36838487972508593, "percentage": 7.38, "elapsed_time": "1:54:27", "remaining_time": "23:55:51"}
135
+ {"current_steps": 135, "total_steps": 1815, "loss": 0.673, "lr": 5.9340659340659345e-05, "epoch": 0.3711340206185567, "percentage": 7.44, "elapsed_time": "1:55:17", "remaining_time": "23:54:46"}
136
+ {"current_steps": 136, "total_steps": 1815, "loss": 0.6799, "lr": 5.978021978021979e-05, "epoch": 0.3738831615120275, "percentage": 7.49, "elapsed_time": "1:56:07", "remaining_time": "23:53:40"}
137
+ {"current_steps": 137, "total_steps": 1815, "loss": 0.656, "lr": 6.021978021978022e-05, "epoch": 0.3766323024054983, "percentage": 7.55, "elapsed_time": "1:56:57", "remaining_time": "23:52:34"}
138
+ {"current_steps": 138, "total_steps": 1815, "loss": 0.6703, "lr": 6.0659340659340665e-05, "epoch": 0.37938144329896906, "percentage": 7.6, "elapsed_time": "1:57:47", "remaining_time": "23:51:30"}
139
+ {"current_steps": 139, "total_steps": 1815, "loss": 0.6699, "lr": 6.10989010989011e-05, "epoch": 0.38213058419243984, "percentage": 7.66, "elapsed_time": "1:58:38", "remaining_time": "23:50:27"}
140
+ {"current_steps": 140, "total_steps": 1815, "loss": 0.6557, "lr": 6.153846153846155e-05, "epoch": 0.3848797250859107, "percentage": 7.71, "elapsed_time": "1:59:28", "remaining_time": "23:49:22"}
141
+ {"current_steps": 141, "total_steps": 1815, "loss": 0.6659, "lr": 6.197802197802199e-05, "epoch": 0.38762886597938145, "percentage": 7.77, "elapsed_time": "2:00:18", "remaining_time": "23:48:19"}
142
+ {"current_steps": 142, "total_steps": 1815, "loss": 0.6568, "lr": 6.241758241758242e-05, "epoch": 0.39037800687285223, "percentage": 7.82, "elapsed_time": "2:01:08", "remaining_time": "23:47:15"}
143
+ {"current_steps": 143, "total_steps": 1815, "loss": 0.6558, "lr": 6.285714285714286e-05, "epoch": 0.393127147766323, "percentage": 7.88, "elapsed_time": "2:01:58", "remaining_time": "23:46:12"}
144
+ {"current_steps": 144, "total_steps": 1815, "loss": 0.6569, "lr": 6.32967032967033e-05, "epoch": 0.3958762886597938, "percentage": 7.93, "elapsed_time": "2:02:48", "remaining_time": "23:45:09"}
145
+ {"current_steps": 145, "total_steps": 1815, "loss": 0.6664, "lr": 6.373626373626373e-05, "epoch": 0.39862542955326463, "percentage": 7.99, "elapsed_time": "2:03:39", "remaining_time": "23:44:10"}
146
+ {"current_steps": 146, "total_steps": 1815, "loss": 0.6675, "lr": 6.417582417582419e-05, "epoch": 0.4013745704467354, "percentage": 8.04, "elapsed_time": "2:04:29", "remaining_time": "23:43:07"}
147
+ {"current_steps": 147, "total_steps": 1815, "loss": 0.6623, "lr": 6.461538461538462e-05, "epoch": 0.4041237113402062, "percentage": 8.1, "elapsed_time": "2:05:24", "remaining_time": "23:42:55"}
148
+ {"current_steps": 148, "total_steps": 1815, "loss": 0.6658, "lr": 6.505494505494506e-05, "epoch": 0.406872852233677, "percentage": 8.15, "elapsed_time": "2:06:25", "remaining_time": "23:43:57"}
149
+ {"current_steps": 149, "total_steps": 1815, "loss": 0.6539, "lr": 6.54945054945055e-05, "epoch": 0.40962199312714775, "percentage": 8.21, "elapsed_time": "2:07:15", "remaining_time": "23:42:58"}
150
+ {"current_steps": 150, "total_steps": 1815, "loss": 0.672, "lr": 6.593406593406594e-05, "epoch": 0.41237113402061853, "percentage": 8.26, "elapsed_time": "2:08:06", "remaining_time": "23:41:54"}
151
+ {"current_steps": 151, "total_steps": 1815, "loss": 0.6609, "lr": 6.637362637362638e-05, "epoch": 0.41512027491408937, "percentage": 8.32, "elapsed_time": "2:08:56", "remaining_time": "23:40:51"}
152
+ {"current_steps": 152, "total_steps": 1815, "loss": 0.6614, "lr": 6.681318681318683e-05, "epoch": 0.41786941580756015, "percentage": 8.37, "elapsed_time": "2:09:46", "remaining_time": "23:39:48"}
153
+ {"current_steps": 153, "total_steps": 1815, "loss": 0.6558, "lr": 6.725274725274725e-05, "epoch": 0.42061855670103093, "percentage": 8.43, "elapsed_time": "2:10:36", "remaining_time": "23:38:47"}
154
+ {"current_steps": 154, "total_steps": 1815, "loss": 0.6712, "lr": 6.76923076923077e-05, "epoch": 0.4233676975945017, "percentage": 8.48, "elapsed_time": "2:11:26", "remaining_time": "23:37:44"}
155
+ {"current_steps": 155, "total_steps": 1815, "loss": 0.6606, "lr": 6.813186813186814e-05, "epoch": 0.4261168384879725, "percentage": 8.54, "elapsed_time": "2:12:16", "remaining_time": "23:36:42"}
156
+ {"current_steps": 156, "total_steps": 1815, "loss": 0.6662, "lr": 6.857142857142857e-05, "epoch": 0.4288659793814433, "percentage": 8.6, "elapsed_time": "2:13:07", "remaining_time": "23:35:39"}
157
+ {"current_steps": 157, "total_steps": 1815, "loss": 0.6688, "lr": 6.901098901098902e-05, "epoch": 0.4316151202749141, "percentage": 8.65, "elapsed_time": "2:13:57", "remaining_time": "23:34:37"}
158
+ {"current_steps": 158, "total_steps": 1815, "loss": 0.668, "lr": 6.945054945054945e-05, "epoch": 0.4343642611683849, "percentage": 8.71, "elapsed_time": "2:14:47", "remaining_time": "23:33:35"}
159
+ {"current_steps": 159, "total_steps": 1815, "loss": 0.6628, "lr": 6.98901098901099e-05, "epoch": 0.43711340206185567, "percentage": 8.76, "elapsed_time": "2:15:37", "remaining_time": "23:32:33"}
160
+ {"current_steps": 160, "total_steps": 1815, "loss": 0.6616, "lr": 7.032967032967034e-05, "epoch": 0.43986254295532645, "percentage": 8.82, "elapsed_time": "2:16:27", "remaining_time": "23:31:33"}
161
+ {"current_steps": 161, "total_steps": 1815, "loss": 0.6581, "lr": 7.076923076923078e-05, "epoch": 0.44261168384879723, "percentage": 8.87, "elapsed_time": "2:17:18", "remaining_time": "23:30:33"}
162
+ {"current_steps": 162, "total_steps": 1815, "loss": 0.6608, "lr": 7.12087912087912e-05, "epoch": 0.44536082474226807, "percentage": 8.93, "elapsed_time": "2:18:13", "remaining_time": "23:30:26"}
163
+ {"current_steps": 163, "total_steps": 1815, "loss": 0.6451, "lr": 7.164835164835166e-05, "epoch": 0.44810996563573885, "percentage": 8.98, "elapsed_time": "2:19:13", "remaining_time": "23:31:05"}
164
+ {"current_steps": 164, "total_steps": 1815, "loss": 0.6591, "lr": 7.208791208791209e-05, "epoch": 0.45085910652920963, "percentage": 9.04, "elapsed_time": "2:20:03", "remaining_time": "23:30:02"}
165
+ {"current_steps": 165, "total_steps": 1815, "loss": 0.6634, "lr": 7.252747252747253e-05, "epoch": 0.4536082474226804, "percentage": 9.09, "elapsed_time": "2:20:54", "remaining_time": "23:29:00"}
166
+ {"current_steps": 166, "total_steps": 1815, "loss": 0.6611, "lr": 7.296703296703297e-05, "epoch": 0.4563573883161512, "percentage": 9.15, "elapsed_time": "2:21:44", "remaining_time": "23:27:59"}
167
+ {"current_steps": 167, "total_steps": 1815, "loss": 0.6569, "lr": 7.34065934065934e-05, "epoch": 0.45910652920962197, "percentage": 9.2, "elapsed_time": "2:22:34", "remaining_time": "23:26:57"}
168
+ {"current_steps": 168, "total_steps": 1815, "loss": 0.672, "lr": 7.384615384615386e-05, "epoch": 0.4618556701030928, "percentage": 9.26, "elapsed_time": "2:23:24", "remaining_time": "23:25:55"}
169
+ {"current_steps": 169, "total_steps": 1815, "loss": 0.6567, "lr": 7.42857142857143e-05, "epoch": 0.4646048109965636, "percentage": 9.31, "elapsed_time": "2:24:14", "remaining_time": "23:24:54"}
170
+ {"current_steps": 170, "total_steps": 1815, "loss": 0.6606, "lr": 7.472527472527473e-05, "epoch": 0.46735395189003437, "percentage": 9.37, "elapsed_time": "2:25:04", "remaining_time": "23:23:52"}
171
+ {"current_steps": 171, "total_steps": 1815, "loss": 0.6636, "lr": 7.516483516483517e-05, "epoch": 0.47010309278350515, "percentage": 9.42, "elapsed_time": "2:25:54", "remaining_time": "23:22:50"}
172
+ {"current_steps": 172, "total_steps": 1815, "loss": 0.6673, "lr": 7.560439560439561e-05, "epoch": 0.47285223367697593, "percentage": 9.48, "elapsed_time": "2:26:45", "remaining_time": "23:21:49"}
173
+ {"current_steps": 173, "total_steps": 1815, "loss": 0.6572, "lr": 7.604395604395604e-05, "epoch": 0.4756013745704467, "percentage": 9.53, "elapsed_time": "2:27:35", "remaining_time": "23:20:48"}
174
+ {"current_steps": 174, "total_steps": 1815, "loss": 0.6645, "lr": 7.64835164835165e-05, "epoch": 0.47835051546391755, "percentage": 9.59, "elapsed_time": "2:28:25", "remaining_time": "23:19:47"}
175
+ {"current_steps": 175, "total_steps": 1815, "loss": 0.6631, "lr": 7.692307692307693e-05, "epoch": 0.48109965635738833, "percentage": 9.64, "elapsed_time": "2:29:15", "remaining_time": "23:18:49"}
176
+ {"current_steps": 176, "total_steps": 1815, "loss": 0.655, "lr": 7.736263736263737e-05, "epoch": 0.4838487972508591, "percentage": 9.7, "elapsed_time": "2:30:06", "remaining_time": "23:17:48"}
177
+ {"current_steps": 177, "total_steps": 1815, "loss": 0.6617, "lr": 7.780219780219781e-05, "epoch": 0.4865979381443299, "percentage": 9.75, "elapsed_time": "2:31:02", "remaining_time": "23:17:50"}
178
+ {"current_steps": 178, "total_steps": 1815, "loss": 0.665, "lr": 7.824175824175825e-05, "epoch": 0.48934707903780067, "percentage": 9.81, "elapsed_time": "2:32:03", "remaining_time": "23:18:25"}
179
+ {"current_steps": 179, "total_steps": 1815, "loss": 0.6559, "lr": 7.868131868131868e-05, "epoch": 0.49209621993127145, "percentage": 9.86, "elapsed_time": "2:32:53", "remaining_time": "23:17:23"}
180
+ {"current_steps": 180, "total_steps": 1815, "loss": 0.661, "lr": 7.912087912087914e-05, "epoch": 0.4948453608247423, "percentage": 9.92, "elapsed_time": "2:33:43", "remaining_time": "23:16:21"}
181
+ {"current_steps": 181, "total_steps": 1815, "loss": 0.6555, "lr": 7.956043956043956e-05, "epoch": 0.49759450171821307, "percentage": 9.97, "elapsed_time": "2:34:33", "remaining_time": "23:15:20"}
182
+ {"current_steps": 182, "total_steps": 1815, "loss": 0.6672, "lr": 8e-05, "epoch": 0.5003436426116838, "percentage": 10.03, "elapsed_time": "2:35:23", "remaining_time": "23:14:19"}
183
+ {"current_steps": 183, "total_steps": 1815, "loss": 0.669, "lr": 7.999992597860977e-05, "epoch": 0.5030927835051546, "percentage": 10.08, "elapsed_time": "2:36:14", "remaining_time": "23:13:18"}
184
+ {"current_steps": 184, "total_steps": 1815, "loss": 0.6484, "lr": 7.999970391471297e-05, "epoch": 0.5058419243986254, "percentage": 10.14, "elapsed_time": "2:37:04", "remaining_time": "23:12:17"}
185
+ {"current_steps": 185, "total_steps": 1815, "loss": 0.6542, "lr": 7.99993338091315e-05, "epoch": 0.5085910652920962, "percentage": 10.19, "elapsed_time": "2:37:54", "remaining_time": "23:11:17"}
186
+ {"current_steps": 186, "total_steps": 1815, "loss": 0.6666, "lr": 7.999881566323518e-05, "epoch": 0.511340206185567, "percentage": 10.25, "elapsed_time": "2:38:44", "remaining_time": "23:10:17"}
187
+ {"current_steps": 187, "total_steps": 1815, "loss": 0.6713, "lr": 7.999814947894166e-05, "epoch": 0.5140893470790378, "percentage": 10.3, "elapsed_time": "2:39:34", "remaining_time": "23:09:16"}
188
+ {"current_steps": 188, "total_steps": 1815, "loss": 0.6564, "lr": 7.999733525871655e-05, "epoch": 0.5168384879725086, "percentage": 10.36, "elapsed_time": "2:40:24", "remaining_time": "23:08:15"}
189
+ {"current_steps": 189, "total_steps": 1815, "loss": 0.6643, "lr": 7.999637300557334e-05, "epoch": 0.5195876288659794, "percentage": 10.41, "elapsed_time": "2:41:14", "remaining_time": "23:07:15"}
190
+ {"current_steps": 190, "total_steps": 1815, "loss": 0.6466, "lr": 7.999526272307338e-05, "epoch": 0.5223367697594502, "percentage": 10.47, "elapsed_time": "2:42:05", "remaining_time": "23:06:16"}
191
+ {"current_steps": 191, "total_steps": 1815, "loss": 0.6567, "lr": 7.999400441532593e-05, "epoch": 0.525085910652921, "percentage": 10.52, "elapsed_time": "2:42:55", "remaining_time": "23:05:17"}
192
+ {"current_steps": 192, "total_steps": 1815, "loss": 0.6535, "lr": 7.999259808698805e-05, "epoch": 0.5278350515463918, "percentage": 10.58, "elapsed_time": "2:43:52", "remaining_time": "23:05:12"}
193
+ {"current_steps": 193, "total_steps": 1815, "loss": 0.6546, "lr": 7.999104374326465e-05, "epoch": 0.5305841924398625, "percentage": 10.63, "elapsed_time": "2:44:52", "remaining_time": "23:05:38"}
194
+ {"current_steps": 194, "total_steps": 1815, "loss": 0.6495, "lr": 7.99893413899085e-05, "epoch": 0.5333333333333333, "percentage": 10.69, "elapsed_time": "2:45:42", "remaining_time": "23:04:37"}
195
+ {"current_steps": 195, "total_steps": 1815, "loss": 0.6513, "lr": 7.99874910332201e-05, "epoch": 0.5360824742268041, "percentage": 10.74, "elapsed_time": "2:46:32", "remaining_time": "23:03:37"}
196
+ {"current_steps": 196, "total_steps": 1815, "loss": 0.6627, "lr": 7.998549268004776e-05, "epoch": 0.5388316151202749, "percentage": 10.8, "elapsed_time": "2:47:22", "remaining_time": "23:02:37"}
197
+ {"current_steps": 197, "total_steps": 1815, "loss": 0.6552, "lr": 7.998334633778752e-05, "epoch": 0.5415807560137457, "percentage": 10.85, "elapsed_time": "2:48:13", "remaining_time": "23:01:37"}
198
+ {"current_steps": 198, "total_steps": 1815, "loss": 0.6572, "lr": 7.998105201438315e-05, "epoch": 0.5443298969072164, "percentage": 10.91, "elapsed_time": "2:49:03", "remaining_time": "23:00:37"}
199
+ {"current_steps": 199, "total_steps": 1815, "loss": 0.6485, "lr": 7.997860971832609e-05, "epoch": 0.5470790378006873, "percentage": 10.96, "elapsed_time": "2:49:53", "remaining_time": "22:59:37"}
200
+ {"current_steps": 200, "total_steps": 1815, "loss": 0.6567, "lr": 7.997601945865545e-05, "epoch": 0.5498281786941581, "percentage": 11.02, "elapsed_time": "2:50:43", "remaining_time": "22:58:36"}
201
+ {"current_steps": 201, "total_steps": 1815, "loss": 0.6571, "lr": 7.997328124495797e-05, "epoch": 0.5525773195876289, "percentage": 11.07, "elapsed_time": "2:51:33", "remaining_time": "22:57:36"}
202
+ {"current_steps": 202, "total_steps": 1815, "loss": 0.6552, "lr": 7.997039508736794e-05, "epoch": 0.5553264604810997, "percentage": 11.13, "elapsed_time": "2:52:23", "remaining_time": "22:56:35"}
203
+ {"current_steps": 203, "total_steps": 1815, "loss": 0.6504, "lr": 7.996736099656728e-05, "epoch": 0.5580756013745705, "percentage": 11.18, "elapsed_time": "2:53:13", "remaining_time": "22:55:36"}
204
+ {"current_steps": 204, "total_steps": 1815, "loss": 0.6501, "lr": 7.996417898378532e-05, "epoch": 0.5608247422680412, "percentage": 11.24, "elapsed_time": "2:54:04", "remaining_time": "22:54:38"}
205
+ {"current_steps": 205, "total_steps": 1815, "loss": 0.6521, "lr": 7.996084906079895e-05, "epoch": 0.563573883161512, "percentage": 11.29, "elapsed_time": "2:54:54", "remaining_time": "22:53:38"}
206
+ {"current_steps": 206, "total_steps": 1815, "loss": 0.6533, "lr": 7.995737123993242e-05, "epoch": 0.5663230240549828, "percentage": 11.35, "elapsed_time": "2:55:44", "remaining_time": "22:52:38"}
207
+ {"current_steps": 207, "total_steps": 1815, "loss": 0.6423, "lr": 7.99537455340574e-05, "epoch": 0.5690721649484536, "percentage": 11.4, "elapsed_time": "2:56:42", "remaining_time": "22:52:40"}
208
+ {"current_steps": 208, "total_steps": 1815, "loss": 0.6588, "lr": 7.994997195659288e-05, "epoch": 0.5718213058419244, "percentage": 11.46, "elapsed_time": "2:57:42", "remaining_time": "22:53:01"}
209
+ {"current_steps": 209, "total_steps": 1815, "loss": 0.6419, "lr": 7.994605052150512e-05, "epoch": 0.5745704467353951, "percentage": 11.52, "elapsed_time": "2:58:33", "remaining_time": "22:52:02"}
210
+ {"current_steps": 210, "total_steps": 1815, "loss": 0.643, "lr": 7.994198124330764e-05, "epoch": 0.5773195876288659, "percentage": 11.57, "elapsed_time": "2:59:23", "remaining_time": "22:51:02"}
211
+ {"current_steps": 211, "total_steps": 1815, "loss": 0.6658, "lr": 7.99377641370611e-05, "epoch": 0.5800687285223368, "percentage": 11.63, "elapsed_time": "3:00:13", "remaining_time": "22:50:02"}
212
+ {"current_steps": 212, "total_steps": 1815, "loss": 0.6566, "lr": 7.993339921837333e-05, "epoch": 0.5828178694158076, "percentage": 11.68, "elapsed_time": "3:01:03", "remaining_time": "22:49:03"}
213
+ {"current_steps": 213, "total_steps": 1815, "loss": 0.6389, "lr": 7.992888650339918e-05, "epoch": 0.5855670103092784, "percentage": 11.74, "elapsed_time": "3:01:53", "remaining_time": "22:48:02"}
214
+ {"current_steps": 214, "total_steps": 1815, "loss": 0.6539, "lr": 7.992422600884052e-05, "epoch": 0.5883161512027492, "percentage": 11.79, "elapsed_time": "3:02:43", "remaining_time": "22:47:03"}
215
+ {"current_steps": 215, "total_steps": 1815, "loss": 0.6465, "lr": 7.991941775194619e-05, "epoch": 0.5910652920962199, "percentage": 11.85, "elapsed_time": "3:03:33", "remaining_time": "22:46:03"}
216
+ {"current_steps": 216, "total_steps": 1815, "loss": 0.6474, "lr": 7.991446175051184e-05, "epoch": 0.5938144329896907, "percentage": 11.9, "elapsed_time": "3:04:23", "remaining_time": "22:45:04"}
217
+ {"current_steps": 217, "total_steps": 1815, "loss": 0.6512, "lr": 7.990935802288002e-05, "epoch": 0.5965635738831615, "percentage": 11.96, "elapsed_time": "3:05:14", "remaining_time": "22:44:05"}
218
+ {"current_steps": 218, "total_steps": 1815, "loss": 0.6541, "lr": 7.990410658793994e-05, "epoch": 0.5993127147766323, "percentage": 12.01, "elapsed_time": "3:06:04", "remaining_time": "22:43:05"}
219
+ {"current_steps": 219, "total_steps": 1815, "loss": 0.6479, "lr": 7.989870746512756e-05, "epoch": 0.6020618556701031, "percentage": 12.07, "elapsed_time": "3:06:54", "remaining_time": "22:42:08"}
220
+ {"current_steps": 220, "total_steps": 1815, "loss": 0.6497, "lr": 7.989316067442539e-05, "epoch": 0.6048109965635738, "percentage": 12.12, "elapsed_time": "3:07:44", "remaining_time": "22:41:08"}
221
+ {"current_steps": 221, "total_steps": 1815, "loss": 0.6352, "lr": 7.98874662363625e-05, "epoch": 0.6075601374570446, "percentage": 12.18, "elapsed_time": "3:08:34", "remaining_time": "22:40:09"}
222
+ {"current_steps": 222, "total_steps": 1815, "loss": 0.6443, "lr": 7.988162417201437e-05, "epoch": 0.6103092783505155, "percentage": 12.23, "elapsed_time": "3:09:32", "remaining_time": "22:40:03"}
223
+ {"current_steps": 223, "total_steps": 1815, "loss": 0.6652, "lr": 7.987563450300293e-05, "epoch": 0.6130584192439863, "percentage": 12.29, "elapsed_time": "3:10:33", "remaining_time": "22:40:22"}
224
+ {"current_steps": 224, "total_steps": 1815, "loss": 0.6456, "lr": 7.986949725149634e-05, "epoch": 0.6158075601374571, "percentage": 12.34, "elapsed_time": "3:11:23", "remaining_time": "22:39:22"}
225
+ {"current_steps": 225, "total_steps": 1815, "loss": 0.6479, "lr": 7.986321244020901e-05, "epoch": 0.6185567010309279, "percentage": 12.4, "elapsed_time": "3:12:13", "remaining_time": "22:38:23"}
226
+ {"current_steps": 226, "total_steps": 1815, "loss": 0.6595, "lr": 7.985678009240142e-05, "epoch": 0.6213058419243986, "percentage": 12.45, "elapsed_time": "3:13:03", "remaining_time": "22:37:24"}
227
+ {"current_steps": 227, "total_steps": 1815, "loss": 0.6359, "lr": 7.985020023188018e-05, "epoch": 0.6240549828178694, "percentage": 12.51, "elapsed_time": "3:13:53", "remaining_time": "22:36:24"}
228
+ {"current_steps": 228, "total_steps": 1815, "loss": 0.647, "lr": 7.98434728829978e-05, "epoch": 0.6268041237113402, "percentage": 12.56, "elapsed_time": "3:14:43", "remaining_time": "22:35:25"}
229
+ {"current_steps": 229, "total_steps": 1815, "loss": 0.6475, "lr": 7.983659807065267e-05, "epoch": 0.629553264604811, "percentage": 12.62, "elapsed_time": "3:15:33", "remaining_time": "22:34:26"}
230
+ {"current_steps": 230, "total_steps": 1815, "loss": 0.6364, "lr": 7.982957582028892e-05, "epoch": 0.6323024054982818, "percentage": 12.67, "elapsed_time": "3:16:24", "remaining_time": "22:33:27"}
231
+ {"current_steps": 231, "total_steps": 1815, "loss": 0.6426, "lr": 7.982240615789641e-05, "epoch": 0.6350515463917525, "percentage": 12.73, "elapsed_time": "3:17:14", "remaining_time": "22:32:29"}
232
+ {"current_steps": 232, "total_steps": 1815, "loss": 0.6459, "lr": 7.981508911001057e-05, "epoch": 0.6378006872852233, "percentage": 12.78, "elapsed_time": "3:18:04", "remaining_time": "22:31:31"}
233
+ {"current_steps": 233, "total_steps": 1815, "loss": 0.6474, "lr": 7.980762470371228e-05, "epoch": 0.6405498281786941, "percentage": 12.84, "elapsed_time": "3:18:54", "remaining_time": "22:30:32"}
234
+ {"current_steps": 234, "total_steps": 1815, "loss": 0.6469, "lr": 7.980001296662784e-05, "epoch": 0.643298969072165, "percentage": 12.89, "elapsed_time": "3:19:44", "remaining_time": "22:29:34"}
235
+ {"current_steps": 235, "total_steps": 1815, "loss": 0.6354, "lr": 7.979225392692882e-05, "epoch": 0.6460481099656358, "percentage": 12.95, "elapsed_time": "3:20:34", "remaining_time": "22:28:35"}
236
+ {"current_steps": 236, "total_steps": 1815, "loss": 0.6432, "lr": 7.978434761333195e-05, "epoch": 0.6487972508591066, "percentage": 13.0, "elapsed_time": "3:21:25", "remaining_time": "22:27:37"}
237
+ {"current_steps": 237, "total_steps": 1815, "loss": 0.633, "lr": 7.977629405509905e-05, "epoch": 0.6515463917525773, "percentage": 13.06, "elapsed_time": "3:22:23", "remaining_time": "22:27:31"}
238
+ {"current_steps": 238, "total_steps": 1815, "loss": 0.64, "lr": 7.976809328203693e-05, "epoch": 0.6542955326460481, "percentage": 13.11, "elapsed_time": "3:23:24", "remaining_time": "22:27:50"}
239
+ {"current_steps": 239, "total_steps": 1815, "loss": 0.6495, "lr": 7.975974532449718e-05, "epoch": 0.6570446735395189, "percentage": 13.17, "elapsed_time": "3:24:14", "remaining_time": "22:26:50"}
240
+ {"current_steps": 240, "total_steps": 1815, "loss": 0.6337, "lr": 7.975125021337618e-05, "epoch": 0.6597938144329897, "percentage": 13.22, "elapsed_time": "3:25:05", "remaining_time": "22:25:52"}
241
+ {"current_steps": 241, "total_steps": 1815, "loss": 0.6515, "lr": 7.974260798011494e-05, "epoch": 0.6625429553264605, "percentage": 13.28, "elapsed_time": "3:25:55", "remaining_time": "22:24:53"}
242
+ {"current_steps": 242, "total_steps": 1815, "loss": 0.6446, "lr": 7.973381865669897e-05, "epoch": 0.6652920962199312, "percentage": 13.33, "elapsed_time": "3:26:45", "remaining_time": "22:23:53"}
243
+ {"current_steps": 243, "total_steps": 1815, "loss": 0.6424, "lr": 7.972488227565814e-05, "epoch": 0.668041237113402, "percentage": 13.39, "elapsed_time": "3:27:35", "remaining_time": "22:22:55"}
244
+ {"current_steps": 244, "total_steps": 1815, "loss": 0.6356, "lr": 7.971579887006663e-05, "epoch": 0.6707903780068728, "percentage": 13.44, "elapsed_time": "3:28:25", "remaining_time": "22:21:56"}
245
+ {"current_steps": 245, "total_steps": 1815, "loss": 0.6332, "lr": 7.970656847354277e-05, "epoch": 0.6735395189003437, "percentage": 13.5, "elapsed_time": "3:29:15", "remaining_time": "22:20:57"}
246
+ {"current_steps": 246, "total_steps": 1815, "loss": 0.6341, "lr": 7.969719112024889e-05, "epoch": 0.6762886597938145, "percentage": 13.55, "elapsed_time": "3:30:05", "remaining_time": "22:19:58"}
247
+ {"current_steps": 247, "total_steps": 1815, "loss": 0.6351, "lr": 7.968766684489122e-05, "epoch": 0.6790378006872853, "percentage": 13.61, "elapsed_time": "3:30:55", "remaining_time": "22:19:00"}
248
+ {"current_steps": 248, "total_steps": 1815, "loss": 0.636, "lr": 7.967799568271978e-05, "epoch": 0.681786941580756, "percentage": 13.66, "elapsed_time": "3:31:45", "remaining_time": "22:18:00"}
249
+ {"current_steps": 249, "total_steps": 1815, "loss": 0.6483, "lr": 7.96681776695282e-05, "epoch": 0.6845360824742268, "percentage": 13.72, "elapsed_time": "3:32:35", "remaining_time": "22:17:03"}
250
+ {"current_steps": 250, "total_steps": 1815, "loss": 0.6358, "lr": 7.965821284165362e-05, "epoch": 0.6872852233676976, "percentage": 13.77, "elapsed_time": "3:33:25", "remaining_time": "22:16:04"}
251
+ {"current_steps": 251, "total_steps": 1815, "loss": 0.6345, "lr": 7.964810123597659e-05, "epoch": 0.6900343642611684, "percentage": 13.83, "elapsed_time": "3:34:15", "remaining_time": "22:15:05"}
252
+ {"current_steps": 252, "total_steps": 1815, "loss": 0.6416, "lr": 7.963784288992085e-05, "epoch": 0.6927835051546392, "percentage": 13.88, "elapsed_time": "3:35:13", "remaining_time": "22:14:53"}
253
+ {"current_steps": 253, "total_steps": 1815, "loss": 0.6386, "lr": 7.962743784145323e-05, "epoch": 0.6955326460481099, "percentage": 13.94, "elapsed_time": "3:36:15", "remaining_time": "22:15:11"}
254
+ {"current_steps": 254, "total_steps": 1815, "loss": 0.6311, "lr": 7.961688612908358e-05, "epoch": 0.6982817869415807, "percentage": 13.99, "elapsed_time": "3:37:05", "remaining_time": "22:14:11"}
255
+ {"current_steps": 255, "total_steps": 1815, "loss": 0.6342, "lr": 7.96061877918645e-05, "epoch": 0.7010309278350515, "percentage": 14.05, "elapsed_time": "3:37:55", "remaining_time": "22:13:12"}
256
+ {"current_steps": 256, "total_steps": 1815, "loss": 0.6348, "lr": 7.959534286939126e-05, "epoch": 0.7037800687285224, "percentage": 14.1, "elapsed_time": "3:38:45", "remaining_time": "22:12:13"}
257
+ {"current_steps": 257, "total_steps": 1815, "loss": 0.6387, "lr": 7.95843514018017e-05, "epoch": 0.7065292096219932, "percentage": 14.16, "elapsed_time": "3:39:35", "remaining_time": "22:11:14"}
258
+ {"current_steps": 258, "total_steps": 1815, "loss": 0.6408, "lr": 7.9573213429776e-05, "epoch": 0.709278350515464, "percentage": 14.21, "elapsed_time": "3:40:25", "remaining_time": "22:10:15"}
259
+ {"current_steps": 259, "total_steps": 1815, "loss": 0.6403, "lr": 7.956192899453656e-05, "epoch": 0.7120274914089347, "percentage": 14.27, "elapsed_time": "3:41:15", "remaining_time": "22:09:16"}
260
+ {"current_steps": 260, "total_steps": 1815, "loss": 0.6326, "lr": 7.955049813784787e-05, "epoch": 0.7147766323024055, "percentage": 14.33, "elapsed_time": "3:42:05", "remaining_time": "22:08:17"}
261
+ {"current_steps": 261, "total_steps": 1815, "loss": 0.6289, "lr": 7.953892090201633e-05, "epoch": 0.7175257731958763, "percentage": 14.38, "elapsed_time": "3:42:55", "remaining_time": "22:07:19"}
262
+ {"current_steps": 262, "total_steps": 1815, "loss": 0.6311, "lr": 7.952719732989007e-05, "epoch": 0.7202749140893471, "percentage": 14.44, "elapsed_time": "3:43:45", "remaining_time": "22:06:20"}
263
+ {"current_steps": 263, "total_steps": 1815, "loss": 0.6383, "lr": 7.951532746485886e-05, "epoch": 0.7230240549828179, "percentage": 14.49, "elapsed_time": "3:44:35", "remaining_time": "22:05:23"}
264
+ {"current_steps": 264, "total_steps": 1815, "loss": 0.6257, "lr": 7.950331135085389e-05, "epoch": 0.7257731958762886, "percentage": 14.55, "elapsed_time": "3:45:26", "remaining_time": "22:04:26"}
265
+ {"current_steps": 265, "total_steps": 1815, "loss": 0.6431, "lr": 7.949114903234766e-05, "epoch": 0.7285223367697594, "percentage": 14.6, "elapsed_time": "3:46:16", "remaining_time": "22:03:28"}
266
+ {"current_steps": 266, "total_steps": 1815, "loss": 0.636, "lr": 7.947884055435371e-05, "epoch": 0.7312714776632302, "percentage": 14.66, "elapsed_time": "3:47:06", "remaining_time": "22:02:29"}
267
+ {"current_steps": 267, "total_steps": 1815, "loss": 0.6435, "lr": 7.946638596242661e-05, "epoch": 0.734020618556701, "percentage": 14.71, "elapsed_time": "3:48:04", "remaining_time": "22:02:19"}
268
+ {"current_steps": 268, "total_steps": 1815, "loss": 0.6342, "lr": 7.945378530266166e-05, "epoch": 0.7367697594501719, "percentage": 14.77, "elapsed_time": "3:49:06", "remaining_time": "22:02:32"}
269
+ {"current_steps": 269, "total_steps": 1815, "loss": 0.6496, "lr": 7.944103862169478e-05, "epoch": 0.7395189003436426, "percentage": 14.82, "elapsed_time": "3:49:56", "remaining_time": "22:01:33"}
270
+ {"current_steps": 270, "total_steps": 1815, "loss": 0.6536, "lr": 7.94281459667023e-05, "epoch": 0.7422680412371134, "percentage": 14.88, "elapsed_time": "3:50:46", "remaining_time": "22:00:35"}
271
+ {"current_steps": 271, "total_steps": 1815, "loss": 0.6411, "lr": 7.941510738540086e-05, "epoch": 0.7450171821305842, "percentage": 14.93, "elapsed_time": "3:51:36", "remaining_time": "21:59:36"}
272
+ {"current_steps": 272, "total_steps": 1815, "loss": 0.6489, "lr": 7.940192292604714e-05, "epoch": 0.747766323024055, "percentage": 14.99, "elapsed_time": "3:52:26", "remaining_time": "21:58:37"}
273
+ {"current_steps": 273, "total_steps": 1815, "loss": 0.6345, "lr": 7.938859263743776e-05, "epoch": 0.7505154639175258, "percentage": 15.04, "elapsed_time": "3:53:17", "remaining_time": "21:57:40"}
274
+ {"current_steps": 274, "total_steps": 1815, "loss": 0.65, "lr": 7.937511656890903e-05, "epoch": 0.7532646048109966, "percentage": 15.1, "elapsed_time": "3:54:07", "remaining_time": "21:56:42"}
275
+ {"current_steps": 275, "total_steps": 1815, "loss": 0.6391, "lr": 7.936149477033682e-05, "epoch": 0.7560137457044673, "percentage": 15.15, "elapsed_time": "3:54:57", "remaining_time": "21:55:44"}
276
+ {"current_steps": 276, "total_steps": 1815, "loss": 0.638, "lr": 7.934772729213634e-05, "epoch": 0.7587628865979381, "percentage": 15.21, "elapsed_time": "3:55:47", "remaining_time": "21:54:46"}
277
+ {"current_steps": 277, "total_steps": 1815, "loss": 0.6464, "lr": 7.9333814185262e-05, "epoch": 0.7615120274914089, "percentage": 15.26, "elapsed_time": "3:56:37", "remaining_time": "21:53:49"}
278
+ {"current_steps": 278, "total_steps": 1815, "loss": 0.642, "lr": 7.931975550120716e-05, "epoch": 0.7642611683848797, "percentage": 15.32, "elapsed_time": "3:57:27", "remaining_time": "21:52:51"}
279
+ {"current_steps": 279, "total_steps": 1815, "loss": 0.6428, "lr": 7.930555129200402e-05, "epoch": 0.7670103092783506, "percentage": 15.37, "elapsed_time": "3:58:17", "remaining_time": "21:51:55"}
280
+ {"current_steps": 280, "total_steps": 1815, "loss": 0.6333, "lr": 7.929120161022329e-05, "epoch": 0.7697594501718213, "percentage": 15.43, "elapsed_time": "3:59:07", "remaining_time": "21:50:57"}
281
+ {"current_steps": 281, "total_steps": 1815, "loss": 0.6392, "lr": 7.927670650897421e-05, "epoch": 0.7725085910652921, "percentage": 15.48, "elapsed_time": "3:59:58", "remaining_time": "21:50:00"}
282
+ {"current_steps": 282, "total_steps": 1815, "loss": 0.6324, "lr": 7.92620660419041e-05, "epoch": 0.7752577319587629, "percentage": 15.54, "elapsed_time": "4:00:56", "remaining_time": "21:49:47"}
283
+ {"current_steps": 283, "total_steps": 1815, "loss": 0.646, "lr": 7.924728026319837e-05, "epoch": 0.7780068728522337, "percentage": 15.59, "elapsed_time": "4:01:58", "remaining_time": "21:49:57"}
284
+ {"current_steps": 284, "total_steps": 1815, "loss": 0.6403, "lr": 7.923234922758021e-05, "epoch": 0.7807560137457045, "percentage": 15.65, "elapsed_time": "4:02:48", "remaining_time": "21:48:58"}
285
+ {"current_steps": 285, "total_steps": 1815, "loss": 0.6418, "lr": 7.921727299031042e-05, "epoch": 0.7835051546391752, "percentage": 15.7, "elapsed_time": "4:03:38", "remaining_time": "21:48:00"}
286
+ {"current_steps": 286, "total_steps": 1815, "loss": 0.6383, "lr": 7.920205160718721e-05, "epoch": 0.786254295532646, "percentage": 15.76, "elapsed_time": "4:04:29", "remaining_time": "21:47:02"}
287
+ {"current_steps": 287, "total_steps": 1815, "loss": 0.629, "lr": 7.918668513454598e-05, "epoch": 0.7890034364261168, "percentage": 15.81, "elapsed_time": "4:05:19", "remaining_time": "21:46:05"}
288
+ {"current_steps": 288, "total_steps": 1815, "loss": 0.6369, "lr": 7.917117362925907e-05, "epoch": 0.7917525773195876, "percentage": 15.87, "elapsed_time": "4:06:09", "remaining_time": "21:45:06"}
289
+ {"current_steps": 289, "total_steps": 1815, "loss": 0.6265, "lr": 7.915551714873571e-05, "epoch": 0.7945017182130584, "percentage": 15.92, "elapsed_time": "4:06:59", "remaining_time": "21:44:09"}
290
+ {"current_steps": 290, "total_steps": 1815, "loss": 0.6354, "lr": 7.913971575092157e-05, "epoch": 0.7972508591065293, "percentage": 15.98, "elapsed_time": "4:07:49", "remaining_time": "21:43:11"}
291
+ {"current_steps": 291, "total_steps": 1815, "loss": 0.6258, "lr": 7.912376949429875e-05, "epoch": 0.8, "percentage": 16.03, "elapsed_time": "4:08:39", "remaining_time": "21:42:14"}
292
+ {"current_steps": 292, "total_steps": 1815, "loss": 0.6411, "lr": 7.910767843788543e-05, "epoch": 0.8027491408934708, "percentage": 16.09, "elapsed_time": "4:09:29", "remaining_time": "21:41:17"}
293
+ {"current_steps": 293, "total_steps": 1815, "loss": 0.6432, "lr": 7.909144264123575e-05, "epoch": 0.8054982817869416, "percentage": 16.14, "elapsed_time": "4:10:19", "remaining_time": "21:40:20"}
294
+ {"current_steps": 294, "total_steps": 1815, "loss": 0.6489, "lr": 7.90750621644395e-05, "epoch": 0.8082474226804124, "percentage": 16.2, "elapsed_time": "4:11:09", "remaining_time": "21:39:22"}
295
+ {"current_steps": 295, "total_steps": 1815, "loss": 0.6439, "lr": 7.905853706812199e-05, "epoch": 0.8109965635738832, "percentage": 16.25, "elapsed_time": "4:11:59", "remaining_time": "21:38:25"}
296
+ {"current_steps": 296, "total_steps": 1815, "loss": 0.6482, "lr": 7.904186741344373e-05, "epoch": 0.813745704467354, "percentage": 16.31, "elapsed_time": "4:12:49", "remaining_time": "21:37:27"}
297
+ {"current_steps": 297, "total_steps": 1815, "loss": 0.65, "lr": 7.902505326210028e-05, "epoch": 0.8164948453608247, "percentage": 16.36, "elapsed_time": "4:13:48", "remaining_time": "21:37:16"}
298
+ {"current_steps": 298, "total_steps": 1815, "loss": 0.6434, "lr": 7.900809467632197e-05, "epoch": 0.8192439862542955, "percentage": 16.42, "elapsed_time": "4:14:49", "remaining_time": "21:37:13"}
299
+ {"current_steps": 299, "total_steps": 1815, "loss": 0.6553, "lr": 7.899099171887373e-05, "epoch": 0.8219931271477663, "percentage": 16.47, "elapsed_time": "4:15:39", "remaining_time": "21:36:15"}
300
+ {"current_steps": 300, "total_steps": 1815, "loss": 0.6392, "lr": 7.897374445305478e-05, "epoch": 0.8247422680412371, "percentage": 16.53, "elapsed_time": "4:16:29", "remaining_time": "21:35:17"}
301
+ {"current_steps": 301, "total_steps": 1815, "loss": 0.6316, "lr": 7.895635294269843e-05, "epoch": 0.8274914089347079, "percentage": 16.58, "elapsed_time": "4:17:19", "remaining_time": "21:34:19"}
302
+ {"current_steps": 302, "total_steps": 1815, "loss": 0.6352, "lr": 7.893881725217191e-05, "epoch": 0.8302405498281787, "percentage": 16.64, "elapsed_time": "4:18:09", "remaining_time": "21:33:21"}
303
+ {"current_steps": 303, "total_steps": 1815, "loss": 0.6382, "lr": 7.892113744637599e-05, "epoch": 0.8329896907216495, "percentage": 16.69, "elapsed_time": "4:18:59", "remaining_time": "21:32:24"}
304
+ {"current_steps": 304, "total_steps": 1815, "loss": 0.6447, "lr": 7.890331359074488e-05, "epoch": 0.8357388316151203, "percentage": 16.75, "elapsed_time": "4:19:49", "remaining_time": "21:31:27"}
305
+ {"current_steps": 305, "total_steps": 1815, "loss": 0.6292, "lr": 7.888534575124591e-05, "epoch": 0.8384879725085911, "percentage": 16.8, "elapsed_time": "4:20:39", "remaining_time": "21:30:30"}
306
+ {"current_steps": 306, "total_steps": 1815, "loss": 0.6319, "lr": 7.886723399437931e-05, "epoch": 0.8412371134020619, "percentage": 16.86, "elapsed_time": "4:21:29", "remaining_time": "21:29:32"}
307
+ {"current_steps": 307, "total_steps": 1815, "loss": 0.6169, "lr": 7.884897838717792e-05, "epoch": 0.8439862542955326, "percentage": 16.91, "elapsed_time": "4:22:20", "remaining_time": "21:28:35"}
308
+ {"current_steps": 308, "total_steps": 1815, "loss": 0.6355, "lr": 7.883057899720703e-05, "epoch": 0.8467353951890034, "percentage": 16.97, "elapsed_time": "4:23:10", "remaining_time": "21:27:39"}
309
+ {"current_steps": 309, "total_steps": 1815, "loss": 0.6274, "lr": 7.881203589256408e-05, "epoch": 0.8494845360824742, "percentage": 17.02, "elapsed_time": "4:24:00", "remaining_time": "21:26:42"}
310
+ {"current_steps": 310, "total_steps": 1815, "loss": 0.6392, "lr": 7.879334914187836e-05, "epoch": 0.852233676975945, "percentage": 17.08, "elapsed_time": "4:24:50", "remaining_time": "21:25:45"}
311
+ {"current_steps": 311, "total_steps": 1815, "loss": 0.6296, "lr": 7.877451881431086e-05, "epoch": 0.8549828178694158, "percentage": 17.13, "elapsed_time": "4:25:40", "remaining_time": "21:24:48"}
312
+ {"current_steps": 312, "total_steps": 1815, "loss": 0.6352, "lr": 7.87555449795539e-05, "epoch": 0.8577319587628865, "percentage": 17.19, "elapsed_time": "4:26:39", "remaining_time": "21:24:32"}
313
+ {"current_steps": 313, "total_steps": 1815, "loss": 0.6361, "lr": 7.873642770783098e-05, "epoch": 0.8604810996563574, "percentage": 17.25, "elapsed_time": "4:27:41", "remaining_time": "21:24:36"}
314
+ {"current_steps": 314, "total_steps": 1815, "loss": 0.6433, "lr": 7.871716706989645e-05, "epoch": 0.8632302405498282, "percentage": 17.3, "elapsed_time": "4:28:31", "remaining_time": "21:23:38"}
315
+ {"current_steps": 315, "total_steps": 1815, "loss": 0.6346, "lr": 7.869776313703528e-05, "epoch": 0.865979381443299, "percentage": 17.36, "elapsed_time": "4:29:22", "remaining_time": "21:22:41"}
316
+ {"current_steps": 316, "total_steps": 1815, "loss": 0.6287, "lr": 7.867821598106275e-05, "epoch": 0.8687285223367698, "percentage": 17.41, "elapsed_time": "4:30:12", "remaining_time": "21:21:44"}
317
+ {"current_steps": 317, "total_steps": 1815, "loss": 0.6386, "lr": 7.865852567432428e-05, "epoch": 0.8714776632302406, "percentage": 17.47, "elapsed_time": "4:31:02", "remaining_time": "21:20:48"}
318
+ {"current_steps": 318, "total_steps": 1815, "loss": 0.631, "lr": 7.863869228969501e-05, "epoch": 0.8742268041237113, "percentage": 17.52, "elapsed_time": "4:31:52", "remaining_time": "21:19:51"}
319
+ {"current_steps": 319, "total_steps": 1815, "loss": 0.6347, "lr": 7.861871590057971e-05, "epoch": 0.8769759450171821, "percentage": 17.58, "elapsed_time": "4:32:42", "remaining_time": "21:18:54"}
320
+ {"current_steps": 320, "total_steps": 1815, "loss": 0.6346, "lr": 7.85985965809124e-05, "epoch": 0.8797250859106529, "percentage": 17.63, "elapsed_time": "4:33:32", "remaining_time": "21:17:57"}
321
+ {"current_steps": 321, "total_steps": 1815, "loss": 0.6265, "lr": 7.857833440515605e-05, "epoch": 0.8824742268041237, "percentage": 17.69, "elapsed_time": "4:34:22", "remaining_time": "21:17:00"}
322
+ {"current_steps": 322, "total_steps": 1815, "loss": 0.6286, "lr": 7.85579294483024e-05, "epoch": 0.8852233676975945, "percentage": 17.74, "elapsed_time": "4:35:12", "remaining_time": "21:16:03"}
323
+ {"current_steps": 323, "total_steps": 1815, "loss": 0.6256, "lr": 7.85373817858716e-05, "epoch": 0.8879725085910652, "percentage": 17.8, "elapsed_time": "4:36:03", "remaining_time": "21:15:08"}
324
+ {"current_steps": 324, "total_steps": 1815, "loss": 0.6238, "lr": 7.851669149391198e-05, "epoch": 0.8907216494845361, "percentage": 17.85, "elapsed_time": "4:36:53", "remaining_time": "21:14:11"}
325
+ {"current_steps": 325, "total_steps": 1815, "loss": 0.633, "lr": 7.849585864899976e-05, "epoch": 0.8934707903780069, "percentage": 17.91, "elapsed_time": "4:37:43", "remaining_time": "21:13:15"}
326
+ {"current_steps": 326, "total_steps": 1815, "loss": 0.6359, "lr": 7.847488332823873e-05, "epoch": 0.8962199312714777, "percentage": 17.96, "elapsed_time": "4:38:33", "remaining_time": "21:12:18"}
327
+ {"current_steps": 327, "total_steps": 1815, "loss": 0.6375, "lr": 7.845376560926002e-05, "epoch": 0.8989690721649485, "percentage": 18.02, "elapsed_time": "4:39:32", "remaining_time": "21:12:04"}
328
+ {"current_steps": 328, "total_steps": 1815, "loss": 0.6196, "lr": 7.843250557022177e-05, "epoch": 0.9017182130584193, "percentage": 18.07, "elapsed_time": "4:40:34", "remaining_time": "21:12:01"}
329
+ {"current_steps": 329, "total_steps": 1815, "loss": 0.6293, "lr": 7.841110328980887e-05, "epoch": 0.90446735395189, "percentage": 18.13, "elapsed_time": "4:41:24", "remaining_time": "21:11:03"}
330
+ {"current_steps": 330, "total_steps": 1815, "loss": 0.626, "lr": 7.838955884723265e-05, "epoch": 0.9072164948453608, "percentage": 18.18, "elapsed_time": "4:42:14", "remaining_time": "21:10:07"}
331
+ {"current_steps": 331, "total_steps": 1815, "loss": 0.6294, "lr": 7.836787232223058e-05, "epoch": 0.9099656357388316, "percentage": 18.24, "elapsed_time": "4:43:05", "remaining_time": "21:09:10"}
332
+ {"current_steps": 332, "total_steps": 1815, "loss": 0.6431, "lr": 7.8346043795066e-05, "epoch": 0.9127147766323024, "percentage": 18.29, "elapsed_time": "4:43:55", "remaining_time": "21:08:14"}
333
+ {"current_steps": 333, "total_steps": 1815, "loss": 0.6237, "lr": 7.83240733465278e-05, "epoch": 0.9154639175257732, "percentage": 18.35, "elapsed_time": "4:44:45", "remaining_time": "21:07:17"}
334
+ {"current_steps": 334, "total_steps": 1815, "loss": 0.6304, "lr": 7.830196105793017e-05, "epoch": 0.9182130584192439, "percentage": 18.4, "elapsed_time": "4:45:35", "remaining_time": "21:06:20"}
335
+ {"current_steps": 335, "total_steps": 1815, "loss": 0.6311, "lr": 7.827970701111219e-05, "epoch": 0.9209621993127147, "percentage": 18.46, "elapsed_time": "4:46:25", "remaining_time": "21:05:23"}
336
+ {"current_steps": 336, "total_steps": 1815, "loss": 0.6317, "lr": 7.825731128843762e-05, "epoch": 0.9237113402061856, "percentage": 18.51, "elapsed_time": "4:47:15", "remaining_time": "21:04:27"}
337
+ {"current_steps": 337, "total_steps": 1815, "loss": 0.6199, "lr": 7.823477397279464e-05, "epoch": 0.9264604810996564, "percentage": 18.57, "elapsed_time": "4:48:06", "remaining_time": "21:03:32"}
338
+ {"current_steps": 338, "total_steps": 1815, "loss": 0.6248, "lr": 7.821209514759539e-05, "epoch": 0.9292096219931272, "percentage": 18.62, "elapsed_time": "4:48:56", "remaining_time": "21:02:35"}
339
+ {"current_steps": 339, "total_steps": 1815, "loss": 0.6267, "lr": 7.818927489677577e-05, "epoch": 0.931958762886598, "percentage": 18.68, "elapsed_time": "4:49:46", "remaining_time": "21:01:39"}
340
+ {"current_steps": 340, "total_steps": 1815, "loss": 0.6307, "lr": 7.816631330479514e-05, "epoch": 0.9347079037800687, "percentage": 18.73, "elapsed_time": "4:50:36", "remaining_time": "21:00:43"}
341
+ {"current_steps": 341, "total_steps": 1815, "loss": 0.6366, "lr": 7.814321045663594e-05, "epoch": 0.9374570446735395, "percentage": 18.79, "elapsed_time": "4:51:26", "remaining_time": "20:59:47"}
342
+ {"current_steps": 342, "total_steps": 1815, "loss": 0.6284, "lr": 7.811996643780339e-05, "epoch": 0.9402061855670103, "percentage": 18.84, "elapsed_time": "4:52:25", "remaining_time": "20:59:30"}
343
+ {"current_steps": 343, "total_steps": 1815, "loss": 0.6179, "lr": 7.809658133432526e-05, "epoch": 0.9429553264604811, "percentage": 18.9, "elapsed_time": "4:53:27", "remaining_time": "20:59:25"}
344
+ {"current_steps": 344, "total_steps": 1815, "loss": 0.6246, "lr": 7.807305523275142e-05, "epoch": 0.9457044673539519, "percentage": 18.95, "elapsed_time": "4:54:18", "remaining_time": "20:58:28"}
345
+ {"current_steps": 345, "total_steps": 1815, "loss": 0.6186, "lr": 7.804938822015361e-05, "epoch": 0.9484536082474226, "percentage": 19.01, "elapsed_time": "4:55:08", "remaining_time": "20:57:32"}
346
+ {"current_steps": 346, "total_steps": 1815, "loss": 0.6249, "lr": 7.802558038412509e-05, "epoch": 0.9512027491408934, "percentage": 19.06, "elapsed_time": "4:55:58", "remaining_time": "20:56:35"}
347
+ {"current_steps": 347, "total_steps": 1815, "loss": 0.6244, "lr": 7.800163181278033e-05, "epoch": 0.9539518900343643, "percentage": 19.12, "elapsed_time": "4:56:48", "remaining_time": "20:55:39"}
348
+ {"current_steps": 348, "total_steps": 1815, "loss": 0.6264, "lr": 7.797754259475464e-05, "epoch": 0.9567010309278351, "percentage": 19.17, "elapsed_time": "4:57:38", "remaining_time": "20:54:43"}
349
+ {"current_steps": 349, "total_steps": 1815, "loss": 0.6278, "lr": 7.795331281920387e-05, "epoch": 0.9594501718213059, "percentage": 19.23, "elapsed_time": "4:58:28", "remaining_time": "20:53:47"}
350
+ {"current_steps": 350, "total_steps": 1815, "loss": 0.6322, "lr": 7.792894257580415e-05, "epoch": 0.9621993127147767, "percentage": 19.28, "elapsed_time": "4:59:18", "remaining_time": "20:52:50"}
351
+ {"current_steps": 351, "total_steps": 1815, "loss": 0.6251, "lr": 7.790443195475142e-05, "epoch": 0.9649484536082474, "percentage": 19.34, "elapsed_time": "5:00:08", "remaining_time": "20:51:54"}
352
+ {"current_steps": 352, "total_steps": 1815, "loss": 0.6218, "lr": 7.78797810467612e-05, "epoch": 0.9676975945017182, "percentage": 19.39, "elapsed_time": "5:00:59", "remaining_time": "20:50:59"}
353
+ {"current_steps": 353, "total_steps": 1815, "loss": 0.6221, "lr": 7.785498994306821e-05, "epoch": 0.970446735395189, "percentage": 19.45, "elapsed_time": "5:01:49", "remaining_time": "20:50:02"}
354
+ {"current_steps": 354, "total_steps": 1815, "loss": 0.619, "lr": 7.783005873542605e-05, "epoch": 0.9731958762886598, "percentage": 19.5, "elapsed_time": "5:02:39", "remaining_time": "20:49:06"}
355
+ {"current_steps": 355, "total_steps": 1815, "loss": 0.6251, "lr": 7.780498751610684e-05, "epoch": 0.9759450171821306, "percentage": 19.56, "elapsed_time": "5:03:29", "remaining_time": "20:48:10"}
356
+ {"current_steps": 356, "total_steps": 1815, "loss": 0.6357, "lr": 7.777977637790092e-05, "epoch": 0.9786941580756013, "percentage": 19.61, "elapsed_time": "5:04:19", "remaining_time": "20:47:14"}
357
+ {"current_steps": 357, "total_steps": 1815, "loss": 0.6218, "lr": 7.775442541411647e-05, "epoch": 0.9814432989690721, "percentage": 19.67, "elapsed_time": "5:05:19", "remaining_time": "20:46:58"}
358
+ {"current_steps": 358, "total_steps": 1815, "loss": 0.6396, "lr": 7.772893471857915e-05, "epoch": 0.9841924398625429, "percentage": 19.72, "elapsed_time": "5:06:22", "remaining_time": "20:46:52"}
359
+ {"current_steps": 359, "total_steps": 1815, "loss": 0.6368, "lr": 7.77033043856318e-05, "epoch": 0.9869415807560138, "percentage": 19.78, "elapsed_time": "5:07:12", "remaining_time": "20:45:55"}
360
+ {"current_steps": 360, "total_steps": 1815, "loss": 0.6204, "lr": 7.767753451013408e-05, "epoch": 0.9896907216494846, "percentage": 19.83, "elapsed_time": "5:08:02", "remaining_time": "20:44:59"}
361
+ {"current_steps": 361, "total_steps": 1815, "loss": 0.6177, "lr": 7.765162518746207e-05, "epoch": 0.9924398625429554, "percentage": 19.89, "elapsed_time": "5:08:52", "remaining_time": "20:44:03"}
362
+ {"current_steps": 362, "total_steps": 1815, "loss": 0.6288, "lr": 7.762557651350798e-05, "epoch": 0.9951890034364261, "percentage": 19.94, "elapsed_time": "5:09:42", "remaining_time": "20:43:06"}
363
+ {"current_steps": 363, "total_steps": 1815, "loss": 0.6272, "lr": 7.759938858467979e-05, "epoch": 0.9979381443298969, "percentage": 20.0, "elapsed_time": "5:10:32", "remaining_time": "20:42:09"}
364
+ {"current_steps": 364, "total_steps": 1815, "loss": 0.7685, "lr": 7.757306149790082e-05, "epoch": 1.0006872852233677, "percentage": 20.06, "elapsed_time": "5:12:39", "remaining_time": "20:46:22"}
365
+ {"current_steps": 365, "total_steps": 1815, "loss": 0.5916, "lr": 7.754659535060943e-05, "epoch": 1.0034364261168385, "percentage": 20.11, "elapsed_time": "5:13:30", "remaining_time": "20:45:24"}
366
+ {"current_steps": 366, "total_steps": 1815, "loss": 0.5988, "lr": 7.751999024075871e-05, "epoch": 1.0061855670103093, "percentage": 20.17, "elapsed_time": "5:14:20", "remaining_time": "20:44:29"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4c384eba8d080710f49b91e001126624bf66dfebeb84f426b879d83206e6e8b3
3
+ size 7160
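Like the model shards, `training_args.bin` is committed as a Git LFS pointer: a three-line text stub recording the LFS spec version, the SHA-256 of the actual payload, and its size in bytes (7160 here). A minimal sketch for reading such a pointer, assuming the checkout contains the pointer stub rather than the resolved object:

```python
def parse_lfs_pointer(path: str) -> dict:
    # A Git LFS pointer is a small key/value text file, one "key value" pair per line.
    fields = {}
    with open(path) as f:
        for line in f:
            if not line.strip():
                continue
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

ptr = parse_lfs_pointer("training_args.bin")
print(ptr["oid"], ptr["size"])  # e.g. "sha256:4c38..." and "7160"
```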
vocab.json ADDED
The diff for this file is too large to render. See raw diff
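Once the LFS object behind `training_args.bin` is actually downloaded, it is conventionally a pickled `TrainingArguments` object written with `torch.save`, so it can be inspected directly. This is a hedged sketch based on the usual Hugging Face `Trainer` behavior, not on anything this diff itself verifies:

```python
import torch

# training_args.bin is typically a torch.save'd pickle of TrainingArguments,
# not a tensor file, so recent PyTorch needs weights_only=False to unpickle it.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)                     # expected: TrainingArguments (assumption)
print(args.learning_rate, args.num_train_epochs)
```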