penfever committed
Commit e710870 · verified · 1 Parent(s): dc39b8b

Training in progress, step 1500
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,28 @@
+{
+  "</think>": 151668,
+  "</tool_call>": 151658,
+  "</tool_response>": 151666,
+  "<think>": 151667,
+  "<tool_call>": 151657,
+  "<tool_response>": 151665,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
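For reference, a minimal sketch of checking that these added tokens resolve to the ids above; the repo id is a placeholder, not the actual checkpoint name:

import json
from transformers import AutoTokenizer

# Hypothetical checkpoint path/id; substitute the real one.
tok = AutoTokenizer.from_pretrained("DCAgent/checkpoint-1500")
print(tok.convert_tokens_to_ids("<think>"))     # expected: 151667
print(tok.convert_tokens_to_ids("</think>"))    # expected: 151668
print(tok.convert_tokens_to_ids("<|im_end|>"))  # expected: 151645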
chat_template.jinja ADDED
@@ -0,0 +1,89 @@
+{%- if tools %}
+    {{- '<|im_start|>system\n' }}
+    {%- if messages[0].role == 'system' %}
+        {{- messages[0].content + '\n\n' }}
+    {%- endif %}
+    {{- "# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+    {%- for tool in tools %}
+        {{- "\n" }}
+        {{- tool | tojson }}
+    {%- endfor %}
+    {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+{%- else %}
+    {%- if messages[0].role == 'system' %}
+        {{- '<|im_start|>system\n' + messages[0].content + '<|im_end|>\n' }}
+    {%- endif %}
+{%- endif %}
+{%- set ns = namespace(multi_step_tool=true, last_query_index=messages|length - 1) %}
+{%- for message in messages[::-1] %}
+    {%- set index = (messages|length - 1) - loop.index0 %}
+    {%- if ns.multi_step_tool and message.role == "user" and message.content is string and not(message.content.startswith('<tool_response>') and message.content.endswith('</tool_response>')) %}
+        {%- set ns.multi_step_tool = false %}
+        {%- set ns.last_query_index = index %}
+    {%- endif %}
+{%- endfor %}
+{%- for message in messages %}
+    {%- if message.content is string %}
+        {%- set content = message.content %}
+    {%- else %}
+        {%- set content = '' %}
+    {%- endif %}
+    {%- if (message.role == "user") or (message.role == "system" and not loop.first) %}
+        {{- '<|im_start|>' + message.role + '\n' + content + '<|im_end|>' + '\n' }}
+    {%- elif message.role == "assistant" %}
+        {%- set reasoning_content = '' %}
+        {%- if message.reasoning_content is string %}
+            {%- set reasoning_content = message.reasoning_content %}
+        {%- else %}
+            {%- if '</think>' in content %}
+                {%- set reasoning_content = content.split('</think>')[0].rstrip('\n').split('<think>')[-1].lstrip('\n') %}
+                {%- set content = content.split('</think>')[-1].lstrip('\n') %}
+            {%- endif %}
+        {%- endif %}
+        {%- if loop.index0 > ns.last_query_index %}
+            {%- if loop.last or (not loop.last and reasoning_content) %}
+                {{- '<|im_start|>' + message.role + '\n<think>\n' + reasoning_content.strip('\n') + '\n</think>\n\n' + content.lstrip('\n') }}
+            {%- else %}
+                {{- '<|im_start|>' + message.role + '\n' + content }}
+            {%- endif %}
+        {%- else %}
+            {{- '<|im_start|>' + message.role + '\n' + content }}
+        {%- endif %}
+        {%- if message.tool_calls %}
+            {%- for tool_call in message.tool_calls %}
+                {%- if (loop.first and content) or (not loop.first) %}
+                    {{- '\n' }}
+                {%- endif %}
+                {%- if tool_call.function %}
+                    {%- set tool_call = tool_call.function %}
+                {%- endif %}
+                {{- '<tool_call>\n{"name": "' }}
+                {{- tool_call.name }}
+                {{- '", "arguments": ' }}
+                {%- if tool_call.arguments is string %}
+                    {{- tool_call.arguments }}
+                {%- else %}
+                    {{- tool_call.arguments | tojson }}
+                {%- endif %}
+                {{- '}\n</tool_call>' }}
+            {%- endfor %}
+        {%- endif %}
+        {{- '<|im_end|>\n' }}
+    {%- elif message.role == "tool" %}
+        {%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
+            {{- '<|im_start|>user' }}
+        {%- endif %}
+        {{- '\n<tool_response>\n' }}
+        {{- content }}
+        {{- '\n</tool_response>' }}
+        {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+            {{- '<|im_end|>\n' }}
+        {%- endif %}
+    {%- endif %}
+{%- endfor %}
+{%- if add_generation_prompt %}
+    {{- '<|im_start|>assistant\n' }}
+    {%- if enable_thinking is defined and enable_thinking is false %}
+        {{- '<think>\n\n</think>\n\n' }}
+    {%- endif %}
+{%- endif %}
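A minimal sketch of rendering this template through the standard transformers API; the repo id and messages are illustrative, and enable_thinking is forwarded into the template context as an extra keyword argument (the Qwen3-style usage):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("DCAgent/checkpoint-1500")  # placeholder id
messages = [{"role": "user", "content": "List the files in /tmp."}]
prompt = tok.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,  # appends '<|im_start|>assistant\n'
    enable_thinking=False,       # triggers the empty '<think>\n\n</think>' block in the final branch
)
print(prompt)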
config.json ADDED
@@ -0,0 +1,68 @@
+{
+  "architectures": [
+    "Qwen3ForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "dtype": "bfloat16",
+  "eos_token_id": 151645,
+  "head_dim": 128,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 12288,
+  "layer_types": [
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention"
+  ],
+  "max_position_embeddings": 40960,
+  "max_window_layers": 36,
+  "model_type": "qwen3",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 36,
+  "num_key_value_heads": 8,
+  "pad_token_id": 151643,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "transformers_version": "4.57.3",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "vocab_size": 151936
+}
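The attention shape can be sanity-checked directly from these fields; a small sketch (pure arithmetic, assuming the JSON above is saved locally as config.json):

import json

with open("config.json") as f:
    cfg = json.load(f)

# Grouped-query attention: 32 query heads share 8 key/value heads -> 4 queries per KV group.
assert cfg["num_attention_heads"] % cfg["num_key_value_heads"] == 0
print(cfg["num_attention_heads"] // cfg["num_key_value_heads"])  # 4

# head_dim is set explicitly; here it happens to equal hidden_size / num_attention_heads.
print(cfg["hidden_size"] // cfg["num_attention_heads"], cfg["head_dim"])  # 128 128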
generation_config.json ADDED
@@ -0,0 +1,12 @@
+{
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "pad_token_id": 151643,
+  "temperature": 0.6,
+  "top_k": 20,
+  "top_p": 0.95,
+  "transformers_version": "4.57.3"
+}
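These defaults are picked up automatically by model.generate when loading from the checkpoint directory; a sketch of sampling with them, where the repo id is a placeholder:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tok = AutoTokenizer.from_pretrained("DCAgent/checkpoint-1500")  # placeholder id
model = AutoModelForCausalLM.from_pretrained(
    "DCAgent/checkpoint-1500", torch_dtype=torch.bfloat16
)
inputs = tok("Hello", return_tensors="pt")
# do_sample/temperature/top_k/top_p come from generation_config.json unless overridden.
out = model.generate(**inputs, max_new_tokens=32)
print(tok.decode(out[0], skip_special_tokens=True))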
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2dddd3e32ef26fed4263bf1442a4eda190bb9f9612007081be1cc91621b0d241
+size 4902257696
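The three lines above are a Git LFS pointer, not the weights themselves; a small sketch of parsing one with plain string handling (no LFS tooling assumed):

def parse_lfs_pointer(text: str) -> dict:
    # Each line is "key value"; oid is "sha256:<hex>", size is in bytes.
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    algo, digest = fields["oid"].split(":", 1)
    return {"version": fields["version"], "algo": algo,
            "digest": digest, "size_bytes": int(fields["size"])}

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:2dddd3e32ef26fed4263bf1442a4eda190bb9f9612007081be1cc91621b0d241
size 4902257696"""
print(parse_lfs_pointer(pointer)["size_bytes"])  # 4902257696 (~4.9 GB shard)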
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cab938e224175ad5493b1704691531a6924604f6133b974c4339141f42c17dd3
+size 4915960368
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:91de345c88fc02f90e06655ff2ac7c198d08f367b8cbab3f8cfe9c4acc68a4da
+size 4983068496
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cb37bc2c324cbd5616e5a07fc30e53f29daca647917e59c17f305408368b78b5
+size 1580230264
model.safetensors.index.json ADDED
@@ -0,0 +1,407 @@
+{
+  "metadata": {
+    "total_parameters": 308224,
+    "total_size": 16381470720
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00004-of-00004.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.35.input_layernorm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.self_attn.k_norm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.self_attn.q_norm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.35.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.norm.weight": "model-00004-of-00004.safetensors"
+  }
+}
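from_pretrained resolves sharded checkpoints through this index automatically; for manual inspection, a sketch of locating and reading one tensor, assuming the index and shards sit in the current directory and torch is installed for the "pt" framework:

import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.35.self_attn.q_proj.weight"
shard = index["weight_map"][name]  # -> "model-00003-of-00004.safetensors"
with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(shard, tuple(tensor.shape))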
run_summary.json ADDED
@@ -0,0 +1,12 @@
+{
+  "agent_name": "terminus-2",
+  "training_start": null,
+  "training_end": null,
+  "created_by": "DCAgent",
+  "base_model_name": "Qwen/Qwen3-8B",
+  "dataset_name": "DCAgent/exp_tas_timeout_multiplier_0.25_traces",
+  "training_type": "SFT",
+  "training_parameters": "https://huggingface.co/laion/exp_tas_timeout_multiplier_0_25_traces/blob/main/config.json",
+  "wandb_link": "https://wandb.ai/dogml/OpenThoughts-Agent/runs/sft_exp_tas_timeout_multiplier_0-25_traces_Qwen3-8B",
+  "traces_location_s3": null
+}
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
+size 11422654
tokenizer_config.json ADDED
@@ -0,0 +1,240 @@
+{
+  "add_bos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151645": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151646": {
+      "content": "<|object_ref_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151647": {
+      "content": "<|object_ref_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151648": {
+      "content": "<|box_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151649": {
+      "content": "<|box_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151650": {
+      "content": "<|quad_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151651": {
+      "content": "<|quad_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151652": {
+      "content": "<|vision_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151653": {
+      "content": "<|vision_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151654": {
+      "content": "<|vision_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151655": {
+      "content": "<|image_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151656": {
+      "content": "<|video_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151657": {
+      "content": "<tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151658": {
+      "content": "</tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151659": {
+      "content": "<|fim_prefix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151660": {
+      "content": "<|fim_middle|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151661": {
+      "content": "<|fim_suffix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151662": {
+      "content": "<|fim_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151663": {
+      "content": "<|repo_name|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151664": {
+      "content": "<|file_sep|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151665": {
+      "content": "<tool_response>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151666": {
+      "content": "</tool_response>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151667": {
+      "content": "<think>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151668": {
+      "content": "</think>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "bos_token": null,
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "errors": "replace",
+  "extra_special_tokens": {},
+  "model_max_length": 32768,
+  "pad_token": "<|endoftext|>",
+  "padding_side": "right",
+  "split_special_tokens": false,
+  "tokenizer_class": "Qwen2Tokenizer",
+  "unk_token": null
+}
trainer_log.jsonl ADDED
@@ -0,0 +1,338 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {"current_steps": 5, "total_steps": 4144, "loss": 1.1892, "lr": 3.855421686746989e-07, "epoch": 0.0084530853761623, "percentage": 0.12, "elapsed_time": "0:00:23", "remaining_time": "5:18:12"}
2
+ {"current_steps": 10, "total_steps": 4144, "loss": 1.1909, "lr": 8.674698795180723e-07, "epoch": 0.0169061707523246, "percentage": 0.24, "elapsed_time": "0:00:37", "remaining_time": "4:20:54"}
3
+ {"current_steps": 15, "total_steps": 4144, "loss": 1.2099, "lr": 1.349397590361446e-06, "epoch": 0.0253592561284869, "percentage": 0.36, "elapsed_time": "0:00:52", "remaining_time": "4:01:19"}
4
+ {"current_steps": 20, "total_steps": 4144, "loss": 1.1232, "lr": 1.8313253012048193e-06, "epoch": 0.0338123415046492, "percentage": 0.48, "elapsed_time": "0:01:08", "remaining_time": "3:55:47"}
5
+ {"current_steps": 25, "total_steps": 4144, "loss": 1.1038, "lr": 2.313253012048193e-06, "epoch": 0.042265426880811495, "percentage": 0.6, "elapsed_time": "0:01:23", "remaining_time": "3:48:48"}
6
+ {"current_steps": 30, "total_steps": 4144, "loss": 1.0545, "lr": 2.7951807228915666e-06, "epoch": 0.0507185122569738, "percentage": 0.72, "elapsed_time": "0:01:39", "remaining_time": "3:46:58"}
7
+ {"current_steps": 35, "total_steps": 4144, "loss": 1.0262, "lr": 3.2771084337349403e-06, "epoch": 0.05917159763313609, "percentage": 0.84, "elapsed_time": "0:01:53", "remaining_time": "3:42:16"}
8
+ {"current_steps": 40, "total_steps": 4144, "loss": 1.015, "lr": 3.7590361445783136e-06, "epoch": 0.0676246830092984, "percentage": 0.97, "elapsed_time": "0:02:09", "remaining_time": "3:40:41"}
9
+ {"current_steps": 45, "total_steps": 4144, "loss": 0.931, "lr": 4.240963855421687e-06, "epoch": 0.0760777683854607, "percentage": 1.09, "elapsed_time": "0:02:23", "remaining_time": "3:37:39"}
10
+ {"current_steps": 50, "total_steps": 4144, "loss": 0.9053, "lr": 4.7228915662650606e-06, "epoch": 0.08453085376162299, "percentage": 1.21, "elapsed_time": "0:02:37", "remaining_time": "3:34:34"}
11
+ {"current_steps": 55, "total_steps": 4144, "loss": 0.8944, "lr": 5.204819277108434e-06, "epoch": 0.09298393913778528, "percentage": 1.33, "elapsed_time": "0:02:50", "remaining_time": "3:30:53"}
12
+ {"current_steps": 60, "total_steps": 4144, "loss": 0.8293, "lr": 5.686746987951808e-06, "epoch": 0.1014370245139476, "percentage": 1.45, "elapsed_time": "0:03:02", "remaining_time": "3:27:24"}
13
+ {"current_steps": 65, "total_steps": 4144, "loss": 0.8331, "lr": 6.168674698795182e-06, "epoch": 0.10989010989010989, "percentage": 1.57, "elapsed_time": "0:03:16", "remaining_time": "3:25:35"}
14
+ {"current_steps": 70, "total_steps": 4144, "loss": 0.7559, "lr": 6.6506024096385545e-06, "epoch": 0.11834319526627218, "percentage": 1.69, "elapsed_time": "0:03:28", "remaining_time": "3:22:26"}
15
+ {"current_steps": 75, "total_steps": 4144, "loss": 0.7641, "lr": 7.132530120481929e-06, "epoch": 0.12679628064243448, "percentage": 1.81, "elapsed_time": "0:03:41", "remaining_time": "3:20:09"}
16
+ {"current_steps": 80, "total_steps": 4144, "loss": 0.7635, "lr": 7.614457831325302e-06, "epoch": 0.1352493660185968, "percentage": 1.93, "elapsed_time": "0:03:54", "remaining_time": "3:18:48"}
17
+ {"current_steps": 85, "total_steps": 4144, "loss": 0.7347, "lr": 8.096385542168676e-06, "epoch": 0.1437024513947591, "percentage": 2.05, "elapsed_time": "0:04:10", "remaining_time": "3:19:05"}
18
+ {"current_steps": 90, "total_steps": 4144, "loss": 0.7447, "lr": 8.57831325301205e-06, "epoch": 0.1521555367709214, "percentage": 2.17, "elapsed_time": "0:04:22", "remaining_time": "3:17:05"}
19
+ {"current_steps": 95, "total_steps": 4144, "loss": 0.6786, "lr": 9.060240963855423e-06, "epoch": 0.16060862214708369, "percentage": 2.29, "elapsed_time": "0:04:35", "remaining_time": "3:15:41"}
20
+ {"current_steps": 100, "total_steps": 4144, "loss": 0.6916, "lr": 9.542168674698796e-06, "epoch": 0.16906170752324598, "percentage": 2.41, "elapsed_time": "0:04:48", "remaining_time": "3:14:26"}
21
+ {"current_steps": 105, "total_steps": 4144, "loss": 0.7096, "lr": 1.002409638554217e-05, "epoch": 0.17751479289940827, "percentage": 2.53, "elapsed_time": "0:05:00", "remaining_time": "3:12:26"}
22
+ {"current_steps": 110, "total_steps": 4144, "loss": 0.7169, "lr": 1.0506024096385542e-05, "epoch": 0.18596787827557057, "percentage": 2.65, "elapsed_time": "0:05:13", "remaining_time": "3:11:45"}
23
+ {"current_steps": 115, "total_steps": 4144, "loss": 0.6696, "lr": 1.0987951807228916e-05, "epoch": 0.1944209636517329, "percentage": 2.78, "elapsed_time": "0:05:26", "remaining_time": "3:10:39"}
24
+ {"current_steps": 120, "total_steps": 4144, "loss": 0.7062, "lr": 1.146987951807229e-05, "epoch": 0.2028740490278952, "percentage": 2.9, "elapsed_time": "0:05:40", "remaining_time": "3:10:20"}
25
+ {"current_steps": 125, "total_steps": 4144, "loss": 0.6603, "lr": 1.1951807228915664e-05, "epoch": 0.21132713440405748, "percentage": 3.02, "elapsed_time": "0:05:53", "remaining_time": "3:09:31"}
26
+ {"current_steps": 130, "total_steps": 4144, "loss": 0.6573, "lr": 1.2433734939759037e-05, "epoch": 0.21978021978021978, "percentage": 3.14, "elapsed_time": "0:06:05", "remaining_time": "3:08:10"}
27
+ {"current_steps": 135, "total_steps": 4144, "loss": 0.6899, "lr": 1.291566265060241e-05, "epoch": 0.22823330515638207, "percentage": 3.26, "elapsed_time": "0:06:17", "remaining_time": "3:06:56"}
28
+ {"current_steps": 140, "total_steps": 4144, "loss": 0.7076, "lr": 1.3397590361445785e-05, "epoch": 0.23668639053254437, "percentage": 3.38, "elapsed_time": "0:06:29", "remaining_time": "3:05:38"}
+ {"current_steps": 145, "total_steps": 4144, "loss": 0.6918, "lr": 1.3879518072289157e-05, "epoch": 0.2451394759087067, "percentage": 3.5, "elapsed_time": "0:06:41", "remaining_time": "3:04:36"}
+ {"current_steps": 150, "total_steps": 4144, "loss": 0.6942, "lr": 1.436144578313253e-05, "epoch": 0.25359256128486896, "percentage": 3.62, "elapsed_time": "0:06:53", "remaining_time": "3:03:23"}
+ {"current_steps": 155, "total_steps": 4144, "loss": 0.6837, "lr": 1.4843373493975905e-05, "epoch": 0.26204564666103125, "percentage": 3.74, "elapsed_time": "0:07:06", "remaining_time": "3:02:56"}
+ {"current_steps": 160, "total_steps": 4144, "loss": 0.6325, "lr": 1.532530120481928e-05, "epoch": 0.2704987320371936, "percentage": 3.86, "elapsed_time": "0:07:18", "remaining_time": "3:01:47"}
+ {"current_steps": 165, "total_steps": 4144, "loss": 0.6327, "lr": 1.580722891566265e-05, "epoch": 0.2789518174133559, "percentage": 3.98, "elapsed_time": "0:07:29", "remaining_time": "3:00:46"}
+ {"current_steps": 170, "total_steps": 4144, "loss": 0.6693, "lr": 1.6289156626506025e-05, "epoch": 0.2874049027895182, "percentage": 4.1, "elapsed_time": "0:07:41", "remaining_time": "2:59:39"}
+ {"current_steps": 175, "total_steps": 4144, "loss": 0.6323, "lr": 1.67710843373494e-05, "epoch": 0.2958579881656805, "percentage": 4.22, "elapsed_time": "0:07:53", "remaining_time": "2:59:07"}
+ {"current_steps": 180, "total_steps": 4144, "loss": 0.6363, "lr": 1.725301204819277e-05, "epoch": 0.3043110735418428, "percentage": 4.34, "elapsed_time": "0:08:06", "remaining_time": "2:58:42"}
+ {"current_steps": 185, "total_steps": 4144, "loss": 0.627, "lr": 1.7734939759036146e-05, "epoch": 0.3127641589180051, "percentage": 4.46, "elapsed_time": "0:08:20", "remaining_time": "2:58:21"}
+ {"current_steps": 190, "total_steps": 4144, "loss": 0.6267, "lr": 1.821686746987952e-05, "epoch": 0.32121724429416737, "percentage": 4.58, "elapsed_time": "0:08:32", "remaining_time": "2:57:48"}
+ {"current_steps": 195, "total_steps": 4144, "loss": 0.6428, "lr": 1.8698795180722893e-05, "epoch": 0.32967032967032966, "percentage": 4.71, "elapsed_time": "0:08:44", "remaining_time": "2:57:11"}
+ {"current_steps": 200, "total_steps": 4144, "loss": 0.6355, "lr": 1.9180722891566265e-05, "epoch": 0.33812341504649196, "percentage": 4.83, "elapsed_time": "0:08:56", "remaining_time": "2:56:24"}
+ {"current_steps": 205, "total_steps": 4144, "loss": 0.6567, "lr": 1.966265060240964e-05, "epoch": 0.34657650042265425, "percentage": 4.95, "elapsed_time": "0:09:07", "remaining_time": "2:55:22"}
+ {"current_steps": 210, "total_steps": 4144, "loss": 0.6746, "lr": 2.0144578313253014e-05, "epoch": 0.35502958579881655, "percentage": 5.07, "elapsed_time": "0:09:18", "remaining_time": "2:54:28"}
+ {"current_steps": 215, "total_steps": 4144, "loss": 0.614, "lr": 2.062650602409639e-05, "epoch": 0.36348267117497884, "percentage": 5.19, "elapsed_time": "0:09:30", "remaining_time": "2:53:45"}
+ {"current_steps": 220, "total_steps": 4144, "loss": 0.6562, "lr": 2.110843373493976e-05, "epoch": 0.37193575655114114, "percentage": 5.31, "elapsed_time": "0:09:43", "remaining_time": "2:53:28"}
+ {"current_steps": 225, "total_steps": 4144, "loss": 0.6531, "lr": 2.1590361445783136e-05, "epoch": 0.3803888419273035, "percentage": 5.43, "elapsed_time": "0:09:56", "remaining_time": "2:53:07"}
+ {"current_steps": 230, "total_steps": 4144, "loss": 0.6191, "lr": 2.207228915662651e-05, "epoch": 0.3888419273034658, "percentage": 5.55, "elapsed_time": "0:10:10", "remaining_time": "2:53:01"}
+ {"current_steps": 235, "total_steps": 4144, "loss": 0.6637, "lr": 2.255421686746988e-05, "epoch": 0.3972950126796281, "percentage": 5.67, "elapsed_time": "0:10:21", "remaining_time": "2:52:13"}
+ {"current_steps": 240, "total_steps": 4144, "loss": 0.6531, "lr": 2.3036144578313254e-05, "epoch": 0.4057480980557904, "percentage": 5.79, "elapsed_time": "0:10:35", "remaining_time": "2:52:13"}
+ {"current_steps": 245, "total_steps": 4144, "loss": 0.6481, "lr": 2.351807228915663e-05, "epoch": 0.41420118343195267, "percentage": 5.91, "elapsed_time": "0:10:50", "remaining_time": "2:52:25"}
+ {"current_steps": 250, "total_steps": 4144, "loss": 0.6625, "lr": 2.4e-05, "epoch": 0.42265426880811496, "percentage": 6.03, "elapsed_time": "0:11:01", "remaining_time": "2:51:50"}
+ {"current_steps": 255, "total_steps": 4144, "loss": 0.6333, "lr": 2.4481927710843376e-05, "epoch": 0.43110735418427726, "percentage": 6.15, "elapsed_time": "0:11:14", "remaining_time": "2:51:21"}
+ {"current_steps": 260, "total_steps": 4144, "loss": 0.5766, "lr": 2.496385542168675e-05, "epoch": 0.43956043956043955, "percentage": 6.27, "elapsed_time": "0:11:26", "remaining_time": "2:50:57"}
+ {"current_steps": 265, "total_steps": 4144, "loss": 0.6231, "lr": 2.5445783132530122e-05, "epoch": 0.44801352493660185, "percentage": 6.39, "elapsed_time": "0:11:39", "remaining_time": "2:50:38"}
+ {"current_steps": 270, "total_steps": 4144, "loss": 0.6109, "lr": 2.5927710843373497e-05, "epoch": 0.45646661031276414, "percentage": 6.52, "elapsed_time": "0:11:52", "remaining_time": "2:50:25"}
+ {"current_steps": 275, "total_steps": 4144, "loss": 0.6267, "lr": 2.640963855421687e-05, "epoch": 0.46491969568892644, "percentage": 6.64, "elapsed_time": "0:12:04", "remaining_time": "2:49:55"}
+ {"current_steps": 280, "total_steps": 4144, "loss": 0.6215, "lr": 2.6891566265060244e-05, "epoch": 0.47337278106508873, "percentage": 6.76, "elapsed_time": "0:12:15", "remaining_time": "2:49:05"}
+ {"current_steps": 285, "total_steps": 4144, "loss": 0.6087, "lr": 2.737349397590362e-05, "epoch": 0.4818258664412511, "percentage": 6.88, "elapsed_time": "0:12:27", "remaining_time": "2:48:46"}
+ {"current_steps": 290, "total_steps": 4144, "loss": 0.5679, "lr": 2.7855421686746987e-05, "epoch": 0.4902789518174134, "percentage": 7.0, "elapsed_time": "0:12:40", "remaining_time": "2:48:26"}
+ {"current_steps": 295, "total_steps": 4144, "loss": 0.6338, "lr": 2.8337349397590365e-05, "epoch": 0.4987320371935757, "percentage": 7.12, "elapsed_time": "0:12:52", "remaining_time": "2:48:01"}
+ {"current_steps": 300, "total_steps": 4144, "loss": 0.6199, "lr": 2.881927710843374e-05, "epoch": 0.5071851225697379, "percentage": 7.24, "elapsed_time": "0:13:04", "remaining_time": "2:47:31"}
+ {"current_steps": 305, "total_steps": 4144, "loss": 0.6221, "lr": 2.9301204819277108e-05, "epoch": 0.5156382079459002, "percentage": 7.36, "elapsed_time": "0:13:15", "remaining_time": "2:46:55"}
+ {"current_steps": 310, "total_steps": 4144, "loss": 0.6265, "lr": 2.9783132530120483e-05, "epoch": 0.5240912933220625, "percentage": 7.48, "elapsed_time": "0:13:29", "remaining_time": "2:46:55"}
+ {"current_steps": 315, "total_steps": 4144, "loss": 0.6513, "lr": 3.0265060240963858e-05, "epoch": 0.5325443786982249, "percentage": 7.6, "elapsed_time": "0:13:41", "remaining_time": "2:46:21"}
+ {"current_steps": 320, "total_steps": 4144, "loss": 0.5937, "lr": 3.074698795180723e-05, "epoch": 0.5409974640743872, "percentage": 7.72, "elapsed_time": "0:13:52", "remaining_time": "2:45:47"}
+ {"current_steps": 325, "total_steps": 4144, "loss": 0.5974, "lr": 3.122891566265061e-05, "epoch": 0.5494505494505495, "percentage": 7.84, "elapsed_time": "0:14:04", "remaining_time": "2:45:24"}
+ {"current_steps": 330, "total_steps": 4144, "loss": 0.5939, "lr": 3.171084337349398e-05, "epoch": 0.5579036348267118, "percentage": 7.96, "elapsed_time": "0:14:17", "remaining_time": "2:45:15"}
+ {"current_steps": 335, "total_steps": 4144, "loss": 0.5841, "lr": 3.219277108433735e-05, "epoch": 0.5663567202028741, "percentage": 8.08, "elapsed_time": "0:14:29", "remaining_time": "2:44:48"}
+ {"current_steps": 340, "total_steps": 4144, "loss": 0.6166, "lr": 3.267469879518072e-05, "epoch": 0.5748098055790364, "percentage": 8.2, "elapsed_time": "0:14:42", "remaining_time": "2:44:31"}
+ {"current_steps": 345, "total_steps": 4144, "loss": 0.613, "lr": 3.31566265060241e-05, "epoch": 0.5832628909551987, "percentage": 8.33, "elapsed_time": "0:14:55", "remaining_time": "2:44:19"}
+ {"current_steps": 350, "total_steps": 4144, "loss": 0.6287, "lr": 3.363855421686747e-05, "epoch": 0.591715976331361, "percentage": 8.45, "elapsed_time": "0:15:06", "remaining_time": "2:43:46"}
+ {"current_steps": 355, "total_steps": 4144, "loss": 0.6111, "lr": 3.4120481927710844e-05, "epoch": 0.6001690617075233, "percentage": 8.57, "elapsed_time": "0:15:18", "remaining_time": "2:43:23"}
+ {"current_steps": 360, "total_steps": 4144, "loss": 0.5737, "lr": 3.460240963855422e-05, "epoch": 0.6086221470836856, "percentage": 8.69, "elapsed_time": "0:15:29", "remaining_time": "2:42:54"}
+ {"current_steps": 365, "total_steps": 4144, "loss": 0.5936, "lr": 3.5084337349397594e-05, "epoch": 0.6170752324598479, "percentage": 8.81, "elapsed_time": "0:15:41", "remaining_time": "2:42:26"}
+ {"current_steps": 370, "total_steps": 4144, "loss": 0.6079, "lr": 3.5566265060240966e-05, "epoch": 0.6255283178360102, "percentage": 8.93, "elapsed_time": "0:15:52", "remaining_time": "2:41:59"}
+ {"current_steps": 375, "total_steps": 4144, "loss": 0.5999, "lr": 3.604819277108434e-05, "epoch": 0.6339814032121724, "percentage": 9.05, "elapsed_time": "0:16:04", "remaining_time": "2:41:38"}
+ {"current_steps": 380, "total_steps": 4144, "loss": 0.5842, "lr": 3.6530120481927716e-05, "epoch": 0.6424344885883347, "percentage": 9.17, "elapsed_time": "0:16:17", "remaining_time": "2:41:23"}
+ {"current_steps": 385, "total_steps": 4144, "loss": 0.5815, "lr": 3.701204819277109e-05, "epoch": 0.650887573964497, "percentage": 9.29, "elapsed_time": "0:16:29", "remaining_time": "2:41:00"}
+ {"current_steps": 390, "total_steps": 4144, "loss": 0.5935, "lr": 3.749397590361446e-05, "epoch": 0.6593406593406593, "percentage": 9.41, "elapsed_time": "0:16:41", "remaining_time": "2:40:36"}
+ {"current_steps": 395, "total_steps": 4144, "loss": 0.5798, "lr": 3.797590361445784e-05, "epoch": 0.6677937447168216, "percentage": 9.53, "elapsed_time": "0:16:53", "remaining_time": "2:40:16"}
+ {"current_steps": 400, "total_steps": 4144, "loss": 0.6184, "lr": 3.845783132530121e-05, "epoch": 0.6762468300929839, "percentage": 9.65, "elapsed_time": "0:17:07", "remaining_time": "2:40:13"}
+ {"current_steps": 405, "total_steps": 4144, "loss": 0.5632, "lr": 3.893975903614458e-05, "epoch": 0.6846999154691462, "percentage": 9.77, "elapsed_time": "0:17:18", "remaining_time": "2:39:51"}
+ {"current_steps": 410, "total_steps": 4144, "loss": 0.5849, "lr": 3.942168674698795e-05, "epoch": 0.6931530008453085, "percentage": 9.89, "elapsed_time": "0:17:30", "remaining_time": "2:39:26"}
+ {"current_steps": 415, "total_steps": 4144, "loss": 0.5874, "lr": 3.990361445783133e-05, "epoch": 0.7016060862214708, "percentage": 10.01, "elapsed_time": "0:17:41", "remaining_time": "2:39:01"}
+ {"current_steps": 420, "total_steps": 4144, "loss": 0.5888, "lr": 3.999988643760311e-05, "epoch": 0.7100591715976331, "percentage": 10.14, "elapsed_time": "0:17:54", "remaining_time": "2:38:48"}
+ {"current_steps": 425, "total_steps": 4144, "loss": 0.6278, "lr": 3.999942509257603e-05, "epoch": 0.7185122569737954, "percentage": 10.26, "elapsed_time": "0:18:07", "remaining_time": "2:38:36"}
+ {"current_steps": 430, "total_steps": 4144, "loss": 0.598, "lr": 3.9998608875448804e-05, "epoch": 0.7269653423499577, "percentage": 10.38, "elapsed_time": "0:18:19", "remaining_time": "2:38:19"}
+ {"current_steps": 435, "total_steps": 4144, "loss": 0.5662, "lr": 3.9997437800704486e-05, "epoch": 0.73541842772612, "percentage": 10.5, "elapsed_time": "0:18:32", "remaining_time": "2:38:02"}
+ {"current_steps": 440, "total_steps": 4144, "loss": 0.612, "lr": 3.9995911889122764e-05, "epoch": 0.7438715131022823, "percentage": 10.62, "elapsed_time": "0:18:45", "remaining_time": "2:37:56"}
+ {"current_steps": 445, "total_steps": 4144, "loss": 0.5787, "lr": 3.999403116777959e-05, "epoch": 0.7523245984784447, "percentage": 10.74, "elapsed_time": "0:18:56", "remaining_time": "2:37:28"}
+ {"current_steps": 450, "total_steps": 4144, "loss": 0.5681, "lr": 3.999179567004669e-05, "epoch": 0.760777683854607, "percentage": 10.86, "elapsed_time": "0:19:08", "remaining_time": "2:37:07"}
+ {"current_steps": 455, "total_steps": 4144, "loss": 0.61, "lr": 3.998920543559101e-05, "epoch": 0.7692307692307693, "percentage": 10.98, "elapsed_time": "0:19:20", "remaining_time": "2:36:51"}
+ {"current_steps": 460, "total_steps": 4144, "loss": 0.6054, "lr": 3.998626051037396e-05, "epoch": 0.7776838546069316, "percentage": 11.1, "elapsed_time": "0:19:34", "remaining_time": "2:36:48"}
+ {"current_steps": 465, "total_steps": 4144, "loss": 0.567, "lr": 3.998296094665062e-05, "epoch": 0.7861369399830939, "percentage": 11.22, "elapsed_time": "0:19:47", "remaining_time": "2:36:33"}
+ {"current_steps": 470, "total_steps": 4144, "loss": 0.5849, "lr": 3.9979306802968845e-05, "epoch": 0.7945900253592562, "percentage": 11.34, "elapsed_time": "0:20:00", "remaining_time": "2:36:21"}
+ {"current_steps": 475, "total_steps": 4144, "loss": 0.5722, "lr": 3.997529814416818e-05, "epoch": 0.8030431107354185, "percentage": 11.46, "elapsed_time": "0:20:13", "remaining_time": "2:36:13"}
+ {"current_steps": 480, "total_steps": 4144, "loss": 0.6183, "lr": 3.997093504137872e-05, "epoch": 0.8114961961115807, "percentage": 11.58, "elapsed_time": "0:20:25", "remaining_time": "2:35:53"}
+ {"current_steps": 485, "total_steps": 4144, "loss": 0.5773, "lr": 3.996621757201986e-05, "epoch": 0.819949281487743, "percentage": 11.7, "elapsed_time": "0:20:35", "remaining_time": "2:35:23"}
+ {"current_steps": 490, "total_steps": 4144, "loss": 0.5949, "lr": 3.996114581979893e-05, "epoch": 0.8284023668639053, "percentage": 11.82, "elapsed_time": "0:20:47", "remaining_time": "2:35:05"}
+ {"current_steps": 495, "total_steps": 4144, "loss": 0.5959, "lr": 3.995571987470968e-05, "epoch": 0.8368554522400676, "percentage": 11.94, "elapsed_time": "0:20:59", "remaining_time": "2:34:45"}
+ {"current_steps": 500, "total_steps": 4144, "loss": 0.5792, "lr": 3.99499398330307e-05, "epoch": 0.8453085376162299, "percentage": 12.07, "elapsed_time": "0:21:11", "remaining_time": "2:34:29"}
+ {"current_steps": 505, "total_steps": 4144, "loss": 0.5905, "lr": 3.994380579732372e-05, "epoch": 0.8537616229923922, "percentage": 12.19, "elapsed_time": "0:21:22", "remaining_time": "2:34:02"}
+ {"current_steps": 510, "total_steps": 4144, "loss": 0.5441, "lr": 3.993731787643178e-05, "epoch": 0.8622147083685545, "percentage": 12.31, "elapsed_time": "0:21:36", "remaining_time": "2:34:01"}
+ {"current_steps": 515, "total_steps": 4144, "loss": 0.5838, "lr": 3.9930476185477286e-05, "epoch": 0.8706677937447168, "percentage": 12.43, "elapsed_time": "0:21:48", "remaining_time": "2:33:37"}
+ {"current_steps": 520, "total_steps": 4144, "loss": 0.5815, "lr": 3.992328084585999e-05, "epoch": 0.8791208791208791, "percentage": 12.55, "elapsed_time": "0:21:59", "remaining_time": "2:33:16"}
+ {"current_steps": 525, "total_steps": 4144, "loss": 0.5448, "lr": 3.991573198525484e-05, "epoch": 0.8875739644970414, "percentage": 12.67, "elapsed_time": "0:22:11", "remaining_time": "2:32:57"}
+ {"current_steps": 530, "total_steps": 4144, "loss": 0.5556, "lr": 3.9907829737609664e-05, "epoch": 0.8960270498732037, "percentage": 12.79, "elapsed_time": "0:22:23", "remaining_time": "2:32:40"}
+ {"current_steps": 535, "total_steps": 4144, "loss": 0.587, "lr": 3.989957424314287e-05, "epoch": 0.904480135249366, "percentage": 12.91, "elapsed_time": "0:22:36", "remaining_time": "2:32:28"}
+ {"current_steps": 540, "total_steps": 4144, "loss": 0.587, "lr": 3.98909656483409e-05, "epoch": 0.9129332206255283, "percentage": 13.03, "elapsed_time": "0:22:47", "remaining_time": "2:32:05"}
+ {"current_steps": 545, "total_steps": 4144, "loss": 0.5533, "lr": 3.988200410595564e-05, "epoch": 0.9213863060016906, "percentage": 13.15, "elapsed_time": "0:22:59", "remaining_time": "2:31:47"}
+ {"current_steps": 550, "total_steps": 4144, "loss": 0.5688, "lr": 3.9872689775001745e-05, "epoch": 0.9298393913778529, "percentage": 13.27, "elapsed_time": "0:23:10", "remaining_time": "2:31:25"}
+ {"current_steps": 555, "total_steps": 4144, "loss": 0.5317, "lr": 3.986302282075377e-05, "epoch": 0.9382924767540152, "percentage": 13.39, "elapsed_time": "0:23:22", "remaining_time": "2:31:07"}
+ {"current_steps": 560, "total_steps": 4144, "loss": 0.5451, "lr": 3.985300341474325e-05, "epoch": 0.9467455621301775, "percentage": 13.51, "elapsed_time": "0:23:34", "remaining_time": "2:30:54"}
+ {"current_steps": 565, "total_steps": 4144, "loss": 0.5423, "lr": 3.9842631734755696e-05, "epoch": 0.9551986475063398, "percentage": 13.63, "elapsed_time": "0:23:47", "remaining_time": "2:30:42"}
+ {"current_steps": 570, "total_steps": 4144, "loss": 0.5919, "lr": 3.9831907964827386e-05, "epoch": 0.9636517328825022, "percentage": 13.75, "elapsed_time": "0:23:58", "remaining_time": "2:30:17"}
+ {"current_steps": 575, "total_steps": 4144, "loss": 0.6039, "lr": 3.982083229524213e-05, "epoch": 0.9721048182586645, "percentage": 13.88, "elapsed_time": "0:24:09", "remaining_time": "2:29:59"}
+ {"current_steps": 580, "total_steps": 4144, "loss": 0.5927, "lr": 3.9809404922527865e-05, "epoch": 0.9805579036348268, "percentage": 14.0, "elapsed_time": "0:24:22", "remaining_time": "2:29:48"}
+ {"current_steps": 585, "total_steps": 4144, "loss": 0.5799, "lr": 3.979762604945324e-05, "epoch": 0.989010989010989, "percentage": 14.12, "elapsed_time": "0:24:36", "remaining_time": "2:29:40"}
+ {"current_steps": 590, "total_steps": 4144, "loss": 0.5604, "lr": 3.97854958850239e-05, "epoch": 0.9974640743871513, "percentage": 14.24, "elapsed_time": "0:24:49", "remaining_time": "2:29:32"}
+ {"current_steps": 595, "total_steps": 4144, "loss": 0.5785, "lr": 3.97730146444789e-05, "epoch": 1.0050718512256973, "percentage": 14.36, "elapsed_time": "0:25:00", "remaining_time": "2:29:11"}
+ {"current_steps": 600, "total_steps": 4144, "loss": 0.5621, "lr": 3.97601825492868e-05, "epoch": 1.0135249366018597, "percentage": 14.48, "elapsed_time": "0:25:13", "remaining_time": "2:28:58"}
+ {"current_steps": 605, "total_steps": 4144, "loss": 0.5452, "lr": 3.974699982714178e-05, "epoch": 1.021978021978022, "percentage": 14.6, "elapsed_time": "0:25:25", "remaining_time": "2:28:41"}
+ {"current_steps": 610, "total_steps": 4144, "loss": 0.518, "lr": 3.973346671195958e-05, "epoch": 1.0304311073541843, "percentage": 14.72, "elapsed_time": "0:25:37", "remaining_time": "2:28:24"}
+ {"current_steps": 615, "total_steps": 4144, "loss": 0.5522, "lr": 3.971958344387335e-05, "epoch": 1.0388841927303465, "percentage": 14.84, "elapsed_time": "0:25:49", "remaining_time": "2:28:09"}
+ {"current_steps": 620, "total_steps": 4144, "loss": 0.5562, "lr": 3.9705350269229386e-05, "epoch": 1.047337278106509, "percentage": 14.96, "elapsed_time": "0:26:00", "remaining_time": "2:27:48"}
+ {"current_steps": 625, "total_steps": 4144, "loss": 0.5368, "lr": 3.9690767440582784e-05, "epoch": 1.055790363482671, "percentage": 15.08, "elapsed_time": "0:26:12", "remaining_time": "2:27:36"}
+ {"current_steps": 630, "total_steps": 4144, "loss": 0.5019, "lr": 3.967583521669294e-05, "epoch": 1.0642434488588335, "percentage": 15.2, "elapsed_time": "0:26:24", "remaining_time": "2:27:15"}
+ {"current_steps": 635, "total_steps": 4144, "loss": 0.5136, "lr": 3.966055386251895e-05, "epoch": 1.072696534234996, "percentage": 15.32, "elapsed_time": "0:26:35", "remaining_time": "2:26:57"}
+ {"current_steps": 640, "total_steps": 4144, "loss": 0.5326, "lr": 3.9644923649214915e-05, "epoch": 1.081149619611158, "percentage": 15.44, "elapsed_time": "0:26:47", "remaining_time": "2:26:38"}
+ {"current_steps": 645, "total_steps": 4144, "loss": 0.5457, "lr": 3.9628944854125144e-05, "epoch": 1.0896027049873203, "percentage": 15.56, "elapsed_time": "0:26:58", "remaining_time": "2:26:20"}
+ {"current_steps": 650, "total_steps": 4144, "loss": 0.5591, "lr": 3.961261776077922e-05, "epoch": 1.0980557903634827, "percentage": 15.69, "elapsed_time": "0:27:10", "remaining_time": "2:26:03"}
+ {"current_steps": 655, "total_steps": 4144, "loss": 0.5389, "lr": 3.9595942658886946e-05, "epoch": 1.106508875739645, "percentage": 15.81, "elapsed_time": "0:27:22", "remaining_time": "2:25:46"}
+ {"current_steps": 660, "total_steps": 4144, "loss": 0.5209, "lr": 3.957891984433327e-05, "epoch": 1.1149619611158073, "percentage": 15.93, "elapsed_time": "0:27:33", "remaining_time": "2:25:26"}
+ {"current_steps": 665, "total_steps": 4144, "loss": 0.5245, "lr": 3.956154961917297e-05, "epoch": 1.1234150464919697, "percentage": 16.05, "elapsed_time": "0:27:44", "remaining_time": "2:25:08"}
+ {"current_steps": 670, "total_steps": 4144, "loss": 0.5121, "lr": 3.954383229162531e-05, "epoch": 1.1318681318681318, "percentage": 16.17, "elapsed_time": "0:27:56", "remaining_time": "2:24:51"}
+ {"current_steps": 675, "total_steps": 4144, "loss": 0.5199, "lr": 3.952576817606859e-05, "epoch": 1.1403212172442942, "percentage": 16.29, "elapsed_time": "0:28:08", "remaining_time": "2:24:35"}
+ {"current_steps": 680, "total_steps": 4144, "loss": 0.5409, "lr": 3.950735759303456e-05, "epoch": 1.1487743026204564, "percentage": 16.41, "elapsed_time": "0:28:19", "remaining_time": "2:24:17"}
+ {"current_steps": 685, "total_steps": 4144, "loss": 0.5298, "lr": 3.948860086920273e-05, "epoch": 1.1572273879966188, "percentage": 16.53, "elapsed_time": "0:28:30", "remaining_time": "2:23:59"}
+ {"current_steps": 690, "total_steps": 4144, "loss": 0.5228, "lr": 3.9469498337394555e-05, "epoch": 1.165680473372781, "percentage": 16.65, "elapsed_time": "0:28:42", "remaining_time": "2:23:41"}
+ {"current_steps": 695, "total_steps": 4144, "loss": 0.5045, "lr": 3.945005033656756e-05, "epoch": 1.1741335587489434, "percentage": 16.77, "elapsed_time": "0:28:54", "remaining_time": "2:23:29"}
+ {"current_steps": 700, "total_steps": 4144, "loss": 0.5396, "lr": 3.9430257211809306e-05, "epoch": 1.1825866441251056, "percentage": 16.89, "elapsed_time": "0:29:06", "remaining_time": "2:23:13"}
+ {"current_steps": 705, "total_steps": 4144, "loss": 0.5574, "lr": 3.941011931433129e-05, "epoch": 1.191039729501268, "percentage": 17.01, "elapsed_time": "0:29:18", "remaining_time": "2:22:57"}
+ {"current_steps": 710, "total_steps": 4144, "loss": 0.5278, "lr": 3.9389637001462664e-05, "epoch": 1.1994928148774302, "percentage": 17.13, "elapsed_time": "0:29:32", "remaining_time": "2:22:53"}
+ {"current_steps": 715, "total_steps": 4144, "loss": 0.501, "lr": 3.936881063664395e-05, "epoch": 1.2079459002535926, "percentage": 17.25, "elapsed_time": "0:29:44", "remaining_time": "2:22:35"}
+ {"current_steps": 720, "total_steps": 4144, "loss": 0.4902, "lr": 3.934764058942058e-05, "epoch": 1.2163989856297548, "percentage": 17.37, "elapsed_time": "0:29:56", "remaining_time": "2:22:22"}
+ {"current_steps": 725, "total_steps": 4144, "loss": 0.5602, "lr": 3.932612723543628e-05, "epoch": 1.2248520710059172, "percentage": 17.5, "elapsed_time": "0:30:08", "remaining_time": "2:22:10"}
+ {"current_steps": 730, "total_steps": 4144, "loss": 0.5213, "lr": 3.93042709564265e-05, "epoch": 1.2333051563820794, "percentage": 17.62, "elapsed_time": "0:30:20", "remaining_time": "2:21:53"}
+ {"current_steps": 735, "total_steps": 4144, "loss": 0.5359, "lr": 3.928207214021156e-05, "epoch": 1.2417582417582418, "percentage": 17.74, "elapsed_time": "0:30:32", "remaining_time": "2:21:40"}
+ {"current_steps": 740, "total_steps": 4144, "loss": 0.4942, "lr": 3.9259531180689804e-05, "epoch": 1.2502113271344042, "percentage": 17.86, "elapsed_time": "0:30:44", "remaining_time": "2:21:24"}
+ {"current_steps": 745, "total_steps": 4144, "loss": 0.4902, "lr": 3.9236648477830626e-05, "epoch": 1.2586644125105664, "percentage": 17.98, "elapsed_time": "0:30:56", "remaining_time": "2:21:08"}
+ {"current_steps": 750, "total_steps": 4144, "loss": 0.559, "lr": 3.921342443766733e-05, "epoch": 1.2671174978867286, "percentage": 18.1, "elapsed_time": "0:31:07", "remaining_time": "2:20:50"}
+ {"current_steps": 755, "total_steps": 4144, "loss": 0.5342, "lr": 3.9189859472289956e-05, "epoch": 1.275570583262891, "percentage": 18.22, "elapsed_time": "0:31:18", "remaining_time": "2:20:33"}
+ {"current_steps": 760, "total_steps": 4144, "loss": 0.5328, "lr": 3.916595399983796e-05, "epoch": 1.2840236686390534, "percentage": 18.34, "elapsed_time": "0:31:29", "remaining_time": "2:20:15"}
+ {"current_steps": 765, "total_steps": 4144, "loss": 0.4986, "lr": 3.91417084444928e-05, "epoch": 1.2924767540152156, "percentage": 18.46, "elapsed_time": "0:31:42", "remaining_time": "2:20:01"}
+ {"current_steps": 770, "total_steps": 4144, "loss": 0.5048, "lr": 3.91171232364704e-05, "epoch": 1.3009298393913777, "percentage": 18.58, "elapsed_time": "0:31:54", "remaining_time": "2:19:48"}
+ {"current_steps": 775, "total_steps": 4144, "loss": 0.4943, "lr": 3.9092198812013515e-05, "epoch": 1.3093829247675401, "percentage": 18.7, "elapsed_time": "0:32:06", "remaining_time": "2:19:33"}
+ {"current_steps": 780, "total_steps": 4144, "loss": 0.5023, "lr": 3.9066935613384004e-05, "epoch": 1.3178360101437026, "percentage": 18.82, "elapsed_time": "0:32:19", "remaining_time": "2:19:22"}
+ {"current_steps": 785, "total_steps": 4144, "loss": 0.5419, "lr": 3.9041334088854984e-05, "epoch": 1.3262890955198647, "percentage": 18.94, "elapsed_time": "0:32:30", "remaining_time": "2:19:05"}
+ {"current_steps": 790, "total_steps": 4144, "loss": 0.5153, "lr": 3.901539469270283e-05, "epoch": 1.334742180896027, "percentage": 19.06, "elapsed_time": "0:32:41", "remaining_time": "2:18:48"}
+ {"current_steps": 795, "total_steps": 4144, "loss": 0.5364, "lr": 3.8989117885199184e-05, "epoch": 1.3431952662721893, "percentage": 19.18, "elapsed_time": "0:32:54", "remaining_time": "2:18:39"}
+ {"current_steps": 800, "total_steps": 4144, "loss": 0.5061, "lr": 3.896250413260273e-05, "epoch": 1.3516483516483517, "percentage": 19.31, "elapsed_time": "0:33:06", "remaining_time": "2:18:24"}
+ {"current_steps": 805, "total_steps": 4144, "loss": 0.5383, "lr": 3.8935553907150974e-05, "epoch": 1.360101437024514, "percentage": 19.43, "elapsed_time": "0:33:18", "remaining_time": "2:18:11"}
+ {"current_steps": 810, "total_steps": 4144, "loss": 0.526, "lr": 3.8908267687051806e-05, "epoch": 1.3685545224006763, "percentage": 19.55, "elapsed_time": "0:33:30", "remaining_time": "2:17:54"}
+ {"current_steps": 815, "total_steps": 4144, "loss": 0.5069, "lr": 3.8880645956475064e-05, "epoch": 1.3770076077768385, "percentage": 19.67, "elapsed_time": "0:33:42", "remaining_time": "2:17:43"}
+ {"current_steps": 820, "total_steps": 4144, "loss": 0.5089, "lr": 3.88526892055439e-05, "epoch": 1.385460693153001, "percentage": 19.79, "elapsed_time": "0:33:55", "remaining_time": "2:17:29"}
+ {"current_steps": 825, "total_steps": 4144, "loss": 0.5299, "lr": 3.882439793032614e-05, "epoch": 1.393913778529163, "percentage": 19.91, "elapsed_time": "0:34:06", "remaining_time": "2:17:12"}
+ {"current_steps": 830, "total_steps": 4144, "loss": 0.5354, "lr": 3.8795772632825405e-05, "epoch": 1.4023668639053255, "percentage": 20.03, "elapsed_time": "0:34:18", "remaining_time": "2:16:57"}
+ {"current_steps": 835, "total_steps": 4144, "loss": 0.5106, "lr": 3.8766813820972276e-05, "epoch": 1.4108199492814877, "percentage": 20.15, "elapsed_time": "0:34:30", "remaining_time": "2:16:43"}
+ {"current_steps": 840, "total_steps": 4144, "loss": 0.5126, "lr": 3.8737522008615247e-05, "epoch": 1.41927303465765, "percentage": 20.27, "elapsed_time": "0:34:43", "remaining_time": "2:16:33"}
+ {"current_steps": 845, "total_steps": 4144, "loss": 0.5388, "lr": 3.87078977155116e-05, "epoch": 1.4277261200338123, "percentage": 20.39, "elapsed_time": "0:34:54", "remaining_time": "2:16:16"}
+ {"current_steps": 850, "total_steps": 4144, "loss": 0.5573, "lr": 3.867794146731822e-05, "epoch": 1.4361792054099747, "percentage": 20.51, "elapsed_time": "0:35:05", "remaining_time": "2:15:58"}
+ {"current_steps": 855, "total_steps": 4144, "loss": 0.5282, "lr": 3.864765379558219e-05, "epoch": 1.4446322907861369, "percentage": 20.63, "elapsed_time": "0:35:18", "remaining_time": "2:15:48"}
+ {"current_steps": 860, "total_steps": 4144, "loss": 0.5003, "lr": 3.861703523773146e-05, "epoch": 1.4530853761622993, "percentage": 20.75, "elapsed_time": "0:35:29", "remaining_time": "2:15:32"}
+ {"current_steps": 865, "total_steps": 4144, "loss": 0.5406, "lr": 3.858608633706525e-05, "epoch": 1.4615384615384617, "percentage": 20.87, "elapsed_time": "0:35:41", "remaining_time": "2:15:17"}
+ {"current_steps": 870, "total_steps": 4144, "loss": 0.5505, "lr": 3.855480764274441e-05, "epoch": 1.4699915469146239, "percentage": 20.99, "elapsed_time": "0:35:52", "remaining_time": "2:15:02"}
+ {"current_steps": 875, "total_steps": 4144, "loss": 0.5153, "lr": 3.852319970978168e-05, "epoch": 1.478444632290786, "percentage": 21.11, "elapsed_time": "0:36:04", "remaining_time": "2:14:47"}
+ {"current_steps": 880, "total_steps": 4144, "loss": 0.5124, "lr": 3.8491263099031874e-05, "epoch": 1.4868977176669484, "percentage": 21.24, "elapsed_time": "0:36:15", "remaining_time": "2:14:30"}
+ {"current_steps": 885, "total_steps": 4144, "loss": 0.5308, "lr": 3.845899837718188e-05, "epoch": 1.4953508030431109, "percentage": 21.36, "elapsed_time": "0:36:28", "remaining_time": "2:14:19"}
+ {"current_steps": 890, "total_steps": 4144, "loss": 0.5386, "lr": 3.842640611674064e-05, "epoch": 1.503803888419273, "percentage": 21.48, "elapsed_time": "0:36:40", "remaining_time": "2:14:05"}
+ {"current_steps": 895, "total_steps": 4144, "loss": 0.5086, "lr": 3.839348689602897e-05, "epoch": 1.5122569737954352, "percentage": 21.6, "elapsed_time": "0:36:51", "remaining_time": "2:13:47"}
+ {"current_steps": 900, "total_steps": 4144, "loss": 0.5522, "lr": 3.836024129916931e-05, "epoch": 1.5207100591715976, "percentage": 21.72, "elapsed_time": "0:37:03", "remaining_time": "2:13:35"}
+ {"current_steps": 905, "total_steps": 4144, "loss": 0.5241, "lr": 3.832666991607536e-05, "epoch": 1.52916314454776, "percentage": 21.84, "elapsed_time": "0:37:15", "remaining_time": "2:13:19"}
+ {"current_steps": 910, "total_steps": 4144, "loss": 0.5006, "lr": 3.82927733424416e-05, "epoch": 1.5376162299239222, "percentage": 21.96, "elapsed_time": "0:37:27", "remaining_time": "2:13:06"}
+ {"current_steps": 915, "total_steps": 4144, "loss": 0.511, "lr": 3.8258552179732736e-05, "epoch": 1.5460693153000844, "percentage": 22.08, "elapsed_time": "0:37:38", "remaining_time": "2:12:49"}
+ {"current_steps": 920, "total_steps": 4144, "loss": 0.52, "lr": 3.822400703517301e-05, "epoch": 1.5545224006762468, "percentage": 22.2, "elapsed_time": "0:37:50", "remaining_time": "2:12:36"}
+ {"current_steps": 925, "total_steps": 4144, "loss": 0.5468, "lr": 3.8189138521735454e-05, "epoch": 1.5629754860524092, "percentage": 22.32, "elapsed_time": "0:38:02", "remaining_time": "2:12:21"}
+ {"current_steps": 930, "total_steps": 4144, "loss": 0.5113, "lr": 3.8153947258130976e-05, "epoch": 1.5714285714285714, "percentage": 22.44, "elapsed_time": "0:38:13", "remaining_time": "2:12:07"}
+ {"current_steps": 935, "total_steps": 4144, "loss": 0.5643, "lr": 3.8118433868797406e-05, "epoch": 1.5798816568047336, "percentage": 22.56, "elapsed_time": "0:38:27", "remaining_time": "2:12:00"}
+ {"current_steps": 940, "total_steps": 4144, "loss": 0.4803, "lr": 3.8082598983888414e-05, "epoch": 1.588334742180896, "percentage": 22.68, "elapsed_time": "0:38:39", "remaining_time": "2:11:45"}
+ {"current_steps": 945, "total_steps": 4144, "loss": 0.5492, "lr": 3.804644323926232e-05, "epoch": 1.5967878275570584, "percentage": 22.8, "elapsed_time": "0:38:51", "remaining_time": "2:11:33"}
+ {"current_steps": 950, "total_steps": 4144, "loss": 0.5258, "lr": 3.800996727647081e-05, "epoch": 1.6052409129332206, "percentage": 22.92, "elapsed_time": "0:39:02", "remaining_time": "2:11:17"}
+ {"current_steps": 955, "total_steps": 4144, "loss": 0.5135, "lr": 3.7973171742747584e-05, "epoch": 1.6136939983093828, "percentage": 23.05, "elapsed_time": "0:39:15", "remaining_time": "2:11:05"}
+ {"current_steps": 960, "total_steps": 4144, "loss": 0.5126, "lr": 3.793605729099681e-05, "epoch": 1.6221470836855452, "percentage": 23.17, "elapsed_time": "0:39:27", "remaining_time": "2:10:52"}
+ {"current_steps": 965, "total_steps": 4144, "loss": 0.511, "lr": 3.7898624579781586e-05, "epoch": 1.6306001690617076, "percentage": 23.29, "elapsed_time": "0:39:39", "remaining_time": "2:10:37"}
+ {"current_steps": 970, "total_steps": 4144, "loss": 0.5241, "lr": 3.786087427331226e-05, "epoch": 1.63905325443787, "percentage": 23.41, "elapsed_time": "0:39:50", "remaining_time": "2:10:22"}
+ {"current_steps": 975, "total_steps": 4144, "loss": 0.5324, "lr": 3.7822807041434615e-05, "epoch": 1.6475063398140322, "percentage": 23.53, "elapsed_time": "0:40:03", "remaining_time": "2:10:10"}
+ {"current_steps": 980, "total_steps": 4144, "loss": 0.5135, "lr": 3.7784423559617985e-05, "epoch": 1.6559594251901943, "percentage": 23.65, "elapsed_time": "0:40:14", "remaining_time": "2:09:56"}
+ {"current_steps": 985, "total_steps": 4144, "loss": 0.4934, "lr": 3.774572450894329e-05, "epoch": 1.6644125105663568, "percentage": 23.77, "elapsed_time": "0:40:26", "remaining_time": "2:09:41"}
+ {"current_steps": 990, "total_steps": 4144, "loss": 0.5391, "lr": 3.7706710576090936e-05, "epoch": 1.6728655959425192, "percentage": 23.89, "elapsed_time": "0:40:38", "remaining_time": "2:09:29"}
+ {"current_steps": 995, "total_steps": 4144, "loss": 0.5225, "lr": 3.766738245332865e-05, "epoch": 1.6813186813186813, "percentage": 24.01, "elapsed_time": "0:40:50", "remaining_time": "2:09:15"}
+ {"current_steps": 1000, "total_steps": 4144, "loss": 0.5054, "lr": 3.762774083849915e-05, "epoch": 1.6897717666948435, "percentage": 24.13, "elapsed_time": "0:41:01", "remaining_time": "2:08:59"}
+ {"current_steps": 1005, "total_steps": 4144, "loss": 0.5428, "lr": 3.7587786435007825e-05, "epoch": 1.698224852071006, "percentage": 24.25, "elapsed_time": "0:41:12", "remaining_time": "2:08:42"}
+ {"current_steps": 1010, "total_steps": 4144, "loss": 0.5208, "lr": 3.7547519951810196e-05, "epoch": 1.7066779374471683, "percentage": 24.37, "elapsed_time": "0:41:24", "remaining_time": "2:08:29"}
+ {"current_steps": 1015, "total_steps": 4144, "loss": 0.5325, "lr": 3.7506942103399385e-05, "epoch": 1.7151310228233305, "percentage": 24.49, "elapsed_time": "0:41:36", "remaining_time": "2:08:15"}
+ {"current_steps": 1020, "total_steps": 4144, "loss": 0.533, "lr": 3.74660536097934e-05, "epoch": 1.7235841081994927, "percentage": 24.61, "elapsed_time": "0:41:48", "remaining_time": "2:08:02"}
+ {"current_steps": 1025, "total_steps": 4144, "loss": 0.5009, "lr": 3.7424855196522376e-05, "epoch": 1.732037193575655, "percentage": 24.73, "elapsed_time": "0:41:59", "remaining_time": "2:07:47"}
+ {"current_steps": 1030, "total_steps": 4144, "loss": 0.5331, "lr": 3.738334759461569e-05, "epoch": 1.7404902789518175, "percentage": 24.86, "elapsed_time": "0:42:11", "remaining_time": "2:07:32"}
+ {"current_steps": 1035, "total_steps": 4144, "loss": 0.4781, "lr": 3.734153154058902e-05, "epoch": 1.7489433643279797, "percentage": 24.98, "elapsed_time": "0:42:22", "remaining_time": "2:07:15"}
+ {"current_steps": 1040, "total_steps": 4144, "loss": 0.5219, "lr": 3.7299407776431236e-05, "epoch": 1.7573964497041419, "percentage": 25.1, "elapsed_time": "0:42:32", "remaining_time": "2:06:59"}
+ {"current_steps": 1045, "total_steps": 4144, "loss": 0.5433, "lr": 3.7256977049591255e-05, "epoch": 1.7658495350803043, "percentage": 25.22, "elapsed_time": "0:42:45", "remaining_time": "2:06:46"}
+ {"current_steps": 1050, "total_steps": 4144, "loss": 0.5343, "lr": 3.7214240112964776e-05, "epoch": 1.7743026204564667, "percentage": 25.34, "elapsed_time": "0:42:56", "remaining_time": "2:06:33"}
+ {"current_steps": 1055, "total_steps": 4144, "loss": 0.496, "lr": 3.7171197724880916e-05, "epoch": 1.7827557058326289, "percentage": 25.46, "elapsed_time": "0:43:09", "remaining_time": "2:06:21"}
+ {"current_steps": 1060, "total_steps": 4144, "loss": 0.4876, "lr": 3.712785064908877e-05, "epoch": 1.791208791208791, "percentage": 25.58, "elapsed_time": "0:43:23", "remaining_time": "2:06:13"}
+ {"current_steps": 1065, "total_steps": 4144, "loss": 0.5446, "lr": 3.708419965474384e-05, "epoch": 1.7996618765849535, "percentage": 25.7, "elapsed_time": "0:43:34", "remaining_time": "2:05:57"}
+ {"current_steps": 1070, "total_steps": 4144, "loss": 0.5088, "lr": 3.704024551639438e-05, "epoch": 1.8081149619611159, "percentage": 25.82, "elapsed_time": "0:43:45", "remaining_time": "2:05:43"}
+ {"current_steps": 1075, "total_steps": 4144, "loss": 0.5087, "lr": 3.6995989013967695e-05, "epoch": 1.816568047337278, "percentage": 25.94, "elapsed_time": "0:43:57", "remaining_time": "2:05:30"}
+ {"current_steps": 1080, "total_steps": 4144, "loss": 0.4746, "lr": 3.695143093275625e-05, "epoch": 1.8250211327134402, "percentage": 26.06, "elapsed_time": "0:44:09", "remaining_time": "2:05:17"}
+ {"current_steps": 1085, "total_steps": 4144, "loss": 0.4817, "lr": 3.690657206340375e-05, "epoch": 1.8334742180896026, "percentage": 26.18, "elapsed_time": "0:44:22", "remaining_time": "2:05:06"}
+ {"current_steps": 1090, "total_steps": 4144, "loss": 0.5437, "lr": 3.686141320189116e-05, "epoch": 1.841927303465765, "percentage": 26.3, "elapsed_time": "0:44:34", "remaining_time": "2:04:53"}
+ {"current_steps": 1095, "total_steps": 4144, "loss": 0.5124, "lr": 3.6815955149522496e-05, "epoch": 1.8503803888419275, "percentage": 26.42, "elapsed_time": "0:44:47", "remaining_time": "2:04:42"}
+ {"current_steps": 1100, "total_steps": 4144, "loss": 0.4856, "lr": 3.677019871291068e-05, "epoch": 1.8588334742180896, "percentage": 26.54, "elapsed_time": "0:44:59", "remaining_time": "2:04:31"}
+ {"current_steps": 1105, "total_steps": 4144, "loss": 0.5111, "lr": 3.6724144703963184e-05, "epoch": 1.8672865595942518, "percentage": 26.67, "elapsed_time": "0:45:10", "remaining_time": "2:04:15"}
+ {"current_steps": 1110, "total_steps": 4144, "loss": 0.5161, "lr": 3.667779393986765e-05, "epoch": 1.8757396449704142, "percentage": 26.79, "elapsed_time": "0:45:22", "remaining_time": "2:04:02"}
+ {"current_steps": 1115, "total_steps": 4144, "loss": 0.5303, "lr": 3.663114724307735e-05, "epoch": 1.8841927303465766, "percentage": 26.91, "elapsed_time": "0:45:34", "remaining_time": "2:03:48"}
+ {"current_steps": 1120, "total_steps": 4144, "loss": 0.5047, "lr": 3.6584205441296666e-05, "epoch": 1.8926458157227388, "percentage": 27.03, "elapsed_time": "0:45:47", "remaining_time": "2:03:38"}
+ {"current_steps": 1125, "total_steps": 4144, "loss": 0.4995, "lr": 3.6536969367466306e-05, "epoch": 1.901098901098901, "percentage": 27.15, "elapsed_time": "0:46:00", "remaining_time": "2:03:27"}
+ {"current_steps": 1130, "total_steps": 4144, "loss": 0.5111, "lr": 3.6489439859748616e-05, "epoch": 1.9095519864750634, "percentage": 27.27, "elapsed_time": "0:46:11", "remaining_time": "2:03:11"}
+ {"current_steps": 1135, "total_steps": 4144, "loss": 0.5261, "lr": 3.6441617761512645e-05, "epoch": 1.9180050718512258, "percentage": 27.39, "elapsed_time": "0:46:22", "remaining_time": "2:02:57"}
+ {"current_steps": 1140, "total_steps": 4144, "loss": 0.546, "lr": 3.6393503921319205e-05, "epoch": 1.926458157227388, "percentage": 27.51, "elapsed_time": "0:46:34", "remaining_time": "2:02:43"}
+ {"current_steps": 1145, "total_steps": 4144, "loss": 0.5119, "lr": 3.6345099192905814e-05, "epoch": 1.9349112426035502, "percentage": 27.63, "elapsed_time": "0:46:45", "remaining_time": "2:02:28"}
+ {"current_steps": 1150, "total_steps": 4144, "loss": 0.52, "lr": 3.629640443517154e-05, "epoch": 1.9433643279797126, "percentage": 27.75, "elapsed_time": "0:46:57", "remaining_time": "2:02:15"}
+ {"current_steps": 1155, "total_steps": 4144, "loss": 0.4967, "lr": 3.6247420512161774e-05, "epoch": 1.951817413355875, "percentage": 27.87, "elapsed_time": "0:47:08", "remaining_time": "2:02:00"}
+ {"current_steps": 1160, "total_steps": 4144, "loss": 0.514, "lr": 3.6198148293052874e-05, "epoch": 1.9602704987320372, "percentage": 27.99, "elapsed_time": "0:47:21", "remaining_time": "2:01:49"}
+ {"current_steps": 1165, "total_steps": 4144, "loss": 0.5208, "lr": 3.614858865213678e-05, "epoch": 1.9687235841081994, "percentage": 28.11, "elapsed_time": "0:47:33", "remaining_time": "2:01:36"}
+ {"current_steps": 1170, "total_steps": 4144, "loss": 0.4911, "lr": 3.609874246880545e-05, "epoch": 1.9771766694843618, "percentage": 28.23, "elapsed_time": "0:47:44", "remaining_time": "2:01:21"}
+ {"current_steps": 1175, "total_steps": 4144, "loss": 0.4609, "lr": 3.6048610627535296e-05, "epoch": 1.9856297548605242, "percentage": 28.35, "elapsed_time": "0:47:56", "remaining_time": "2:01:07"}
+ {"current_steps": 1180, "total_steps": 4144, "loss": 0.5301, "lr": 3.599819401787148e-05, "epoch": 1.9940828402366864, "percentage": 28.47, "elapsed_time": "0:48:08", "remaining_time": "2:00:54"}
+ {"current_steps": 1185, "total_steps": 4144, "loss": 0.5156, "lr": 3.594749353441213e-05, "epoch": 2.0016906170752327, "percentage": 28.6, "elapsed_time": "0:48:19", "remaining_time": "2:00:40"}
+ {"current_steps": 1190, "total_steps": 4144, "loss": 0.4641, "lr": 3.589651007679246e-05, "epoch": 2.0101437024513946, "percentage": 28.72, "elapsed_time": "0:48:30", "remaining_time": "2:00:25"}
+ {"current_steps": 1195, "total_steps": 4144, "loss": 0.4628, "lr": 3.58452445496688e-05, "epoch": 2.018596787827557, "percentage": 28.84, "elapsed_time": "0:48:42", "remaining_time": "2:00:13"}
+ {"current_steps": 1200, "total_steps": 4144, "loss": 0.4816, "lr": 3.579369786270257e-05, "epoch": 2.0270498732037194, "percentage": 28.96, "elapsed_time": "0:48:53", "remaining_time": "1:59:57"}
+ {"current_steps": 1205, "total_steps": 4144, "loss": 0.4932, "lr": 3.574187093054411e-05, "epoch": 2.035502958579882, "percentage": 29.08, "elapsed_time": "0:49:05", "remaining_time": "1:59:44"}
+ {"current_steps": 1210, "total_steps": 4144, "loss": 0.4477, "lr": 3.5689764672816464e-05, "epoch": 2.043956043956044, "percentage": 29.2, "elapsed_time": "0:49:17", "remaining_time": "1:59:31"}
+ {"current_steps": 1215, "total_steps": 4144, "loss": 0.451, "lr": 3.563738001409908e-05, "epoch": 2.052409129332206, "percentage": 29.32, "elapsed_time": "0:49:29", "remaining_time": "1:59:17"}
+ {"current_steps": 1220, "total_steps": 4144, "loss": 0.462, "lr": 3.558471788391136e-05, "epoch": 2.0608622147083686, "percentage": 29.44, "elapsed_time": "0:49:42", "remaining_time": "1:59:09"}
+ {"current_steps": 1225, "total_steps": 4144, "loss": 0.4573, "lr": 3.5531779216696195e-05, "epoch": 2.069315300084531, "percentage": 29.56, "elapsed_time": "0:49:54", "remaining_time": "1:58:54"}
+ {"current_steps": 1230, "total_steps": 4144, "loss": 0.4392, "lr": 3.54785649518034e-05, "epoch": 2.077768385460693, "percentage": 29.68, "elapsed_time": "0:50:06", "remaining_time": "1:58:42"}
+ {"current_steps": 1235, "total_steps": 4144, "loss": 0.4548, "lr": 3.5425076033473e-05, "epoch": 2.0862214708368554, "percentage": 29.8, "elapsed_time": "0:50:18", "remaining_time": "1:58:29"}
+ {"current_steps": 1240, "total_steps": 4144, "loss": 0.453, "lr": 3.537131341081854e-05, "epoch": 2.094674556213018, "percentage": 29.92, "elapsed_time": "0:50:29", "remaining_time": "1:58:14"}
+ {"current_steps": 1245, "total_steps": 4144, "loss": 0.4638, "lr": 3.531727803781016e-05, "epoch": 2.10312764158918, "percentage": 30.04, "elapsed_time": "0:50:40", "remaining_time": "1:58:00"}
+ {"current_steps": 1250, "total_steps": 4144, "loss": 0.4734, "lr": 3.5262970873257754e-05, "epoch": 2.111580726965342, "percentage": 30.16, "elapsed_time": "0:50:53", "remaining_time": "1:57:50"}
+ {"current_steps": 1255, "total_steps": 4144, "loss": 0.4657, "lr": 3.520839288079388e-05, "epoch": 2.1200338123415046, "percentage": 30.28, "elapsed_time": "0:51:05", "remaining_time": "1:57:37"}
+ {"current_steps": 1260, "total_steps": 4144, "loss": 0.4837, "lr": 3.515354502885673e-05, "epoch": 2.128486897717667, "percentage": 30.41, "elapsed_time": "0:51:17", "remaining_time": "1:57:24"}
+ {"current_steps": 1265, "total_steps": 4144, "loss": 0.4525, "lr": 3.50984282906729e-05, "epoch": 2.1369399830938294, "percentage": 30.53, "elapsed_time": "0:51:29", "remaining_time": "1:57:10"}
+ {"current_steps": 1270, "total_steps": 4144, "loss": 0.4652, "lr": 3.5043043644240135e-05, "epoch": 2.145393068469992, "percentage": 30.65, "elapsed_time": "0:51:41", "remaining_time": "1:56:58"}
+ {"current_steps": 1275, "total_steps": 4144, "loss": 0.4385, "lr": 3.4987392072309964e-05, "epoch": 2.1538461538461537, "percentage": 30.77, "elapsed_time": "0:51:52", "remaining_time": "1:56:44"}
+ {"current_steps": 1280, "total_steps": 4144, "loss": 0.4507, "lr": 3.493147456237029e-05, "epoch": 2.162299239222316, "percentage": 30.89, "elapsed_time": "0:52:04", "remaining_time": "1:56:31"}
+ {"current_steps": 1285, "total_steps": 4144, "loss": 0.4457, "lr": 3.487529210662784e-05, "epoch": 2.1707523245984786, "percentage": 31.01, "elapsed_time": "0:52:16", "remaining_time": "1:56:18"}
+ {"current_steps": 1290, "total_steps": 4144, "loss": 0.4641, "lr": 3.481884570199058e-05, "epoch": 2.1792054099746405, "percentage": 31.13, "elapsed_time": "0:52:30", "remaining_time": "1:56:09"}
+ {"current_steps": 1295, "total_steps": 4144, "loss": 0.4701, "lr": 3.476213635005001e-05, "epoch": 2.187658495350803, "percentage": 31.25, "elapsed_time": "0:52:41", "remaining_time": "1:55:54"}
+ {"current_steps": 1300, "total_steps": 4144, "loss": 0.4792, "lr": 3.470516505706339e-05, "epoch": 2.1961115807269653, "percentage": 31.37, "elapsed_time": "0:52:52", "remaining_time": "1:55:40"}
+ {"current_steps": 1305, "total_steps": 4144, "loss": 0.4607, "lr": 3.46479328339359e-05, "epoch": 2.2045646661031277, "percentage": 31.49, "elapsed_time": "0:53:04", "remaining_time": "1:55:27"}
+ {"current_steps": 1310, "total_steps": 4144, "loss": 0.4339, "lr": 3.459044069620269e-05, "epoch": 2.21301775147929, "percentage": 31.61, "elapsed_time": "0:53:15", "remaining_time": "1:55:13"}
+ {"current_steps": 1315, "total_steps": 4144, "loss": 0.4513, "lr": 3.453268966401087e-05, "epoch": 2.221470836855452, "percentage": 31.73, "elapsed_time": "0:53:27", "remaining_time": "1:55:01"}
+ {"current_steps": 1320, "total_steps": 4144, "loss": 0.4651, "lr": 3.44746807621014e-05, "epoch": 2.2299239222316145, "percentage": 31.85, "elapsed_time": "0:53:39", "remaining_time": "1:54:47"}
+ {"current_steps": 1325, "total_steps": 4144, "loss": 0.4663, "lr": 3.4416415019790924e-05, "epoch": 2.238377007607777, "percentage": 31.97, "elapsed_time": "0:53:50", "remaining_time": "1:54:33"}
+ {"current_steps": 1330, "total_steps": 4144, "loss": 0.4577, "lr": 3.4357893470953455e-05, "epoch": 2.2468300929839393, "percentage": 32.09, "elapsed_time": "0:54:02", "remaining_time": "1:54:20"}
+ {"current_steps": 1335, "total_steps": 4144, "loss": 0.4353, "lr": 3.42991171540021e-05, "epoch": 2.2552831783601013, "percentage": 32.22, "elapsed_time": "0:54:14", "remaining_time": "1:54:08"}
+ {"current_steps": 1340, "total_steps": 4144, "loss": 0.463, "lr": 3.42400871118706e-05, "epoch": 2.2637362637362637, "percentage": 32.34, "elapsed_time": "0:54:27", "remaining_time": "1:53:57"}
+ {"current_steps": 1345, "total_steps": 4144, "loss": 0.4712, "lr": 3.4180804391994794e-05, "epoch": 2.272189349112426, "percentage": 32.46, "elapsed_time": "0:54:38", "remaining_time": "1:53:42"}
+ {"current_steps": 1350, "total_steps": 4144, "loss": 0.4518, "lr": 3.4121270046294104e-05, "epoch": 2.2806424344885885, "percentage": 32.58, "elapsed_time": "0:54:50", "remaining_time": "1:53:31"}
+ {"current_steps": 1355, "total_steps": 4144, "loss": 0.4791, "lr": 3.4061485131152804e-05, "epoch": 2.2890955198647505, "percentage": 32.7, "elapsed_time": "0:55:02", "remaining_time": "1:53:17"}
+ {"current_steps": 1360, "total_steps": 4144, "loss": 0.4331, "lr": 3.400145070740131e-05, "epoch": 2.297548605240913, "percentage": 32.82, "elapsed_time": "0:55:15", "remaining_time": "1:53:07"}
+ {"current_steps": 1365, "total_steps": 4144, "loss": 0.4689, "lr": 3.394116784029733e-05, "epoch": 2.3060016906170753, "percentage": 32.94, "elapsed_time": "0:55:27", "remaining_time": "1:52:55"}
+ {"current_steps": 1370, "total_steps": 4144, "loss": 0.4577, "lr": 3.388063759950701e-05, "epoch": 2.3144547759932377, "percentage": 33.06, "elapsed_time": "0:55:39", "remaining_time": "1:52:41"}
+ {"current_steps": 1375, "total_steps": 4144, "loss": 0.4517, "lr": 3.381986105908591e-05, "epoch": 2.3229078613693996, "percentage": 33.18, "elapsed_time": "0:55:51", "remaining_time": "1:52:28"}
+ {"current_steps": 1380, "total_steps": 4144, "loss": 0.4206, "lr": 3.375883929745995e-05, "epoch": 2.331360946745562, "percentage": 33.3, "elapsed_time": "0:56:02", "remaining_time": "1:52:13"}
+ {"current_steps": 1385, "total_steps": 4144, "loss": 0.4527, "lr": 3.369757339740629e-05, "epoch": 2.3398140321217245, "percentage": 33.42, "elapsed_time": "0:56:13", "remaining_time": "1:51:59"}
+ {"current_steps": 1390, "total_steps": 4144, "loss": 0.4725, "lr": 3.363606444603411e-05, "epoch": 2.348267117497887, "percentage": 33.54, "elapsed_time": "0:56:25", "remaining_time": "1:51:47"}
+ {"current_steps": 1395, "total_steps": 4144, "loss": 0.4505, "lr": 3.357431353476532e-05, "epoch": 2.3567202028740493, "percentage": 33.66, "elapsed_time": "0:56:38", "remaining_time": "1:51:36"}
+ {"current_steps": 1400, "total_steps": 4144, "loss": 0.4692, "lr": 3.3512321759315196e-05, "epoch": 2.3651732882502112, "percentage": 33.78, "elapsed_time": "0:56:49", "remaining_time": "1:51:23"}
+ {"current_steps": 1405, "total_steps": 4144, "loss": 0.4241, "lr": 3.345009021967294e-05, "epoch": 2.3736263736263736, "percentage": 33.9, "elapsed_time": "0:57:01", "remaining_time": "1:51:10"}
+ {"current_steps": 1410, "total_steps": 4144, "loss": 0.4604, "lr": 3.3387620020082155e-05, "epoch": 2.382079459002536, "percentage": 34.03, "elapsed_time": "0:57:13", "remaining_time": "1:50:57"}
+ {"current_steps": 1415, "total_steps": 4144, "loss": 0.4466, "lr": 3.332491226902124e-05, "epoch": 2.390532544378698, "percentage": 34.15, "elapsed_time": "0:57:26", "remaining_time": "1:50:47"}
+ {"current_steps": 1420, "total_steps": 4144, "loss": 0.4726, "lr": 3.3261968079183744e-05, "epoch": 2.3989856297548604, "percentage": 34.27, "elapsed_time": "0:57:37", "remaining_time": "1:50:33"}
+ {"current_steps": 1425, "total_steps": 4144, "loss": 0.4465, "lr": 3.319878856745862e-05, "epoch": 2.407438715131023, "percentage": 34.39, "elapsed_time": "0:57:48", "remaining_time": "1:50:18"}
+ {"current_steps": 1430, "total_steps": 4144, "loss": 0.4328, "lr": 3.31353748549104e-05, "epoch": 2.415891800507185, "percentage": 34.51, "elapsed_time": "0:57:59", "remaining_time": "1:50:04"}
+ {"current_steps": 1435, "total_steps": 4144, "loss": 0.4599, "lr": 3.307172806675929e-05, "epoch": 2.4243448858833476, "percentage": 34.63, "elapsed_time": "0:58:11", "remaining_time": "1:49:51"}
+ {"current_steps": 1440, "total_steps": 4144, "loss": 0.4344, "lr": 3.3007849332361214e-05, "epoch": 2.4327979712595096, "percentage": 34.75, "elapsed_time": "0:58:23", "remaining_time": "1:49:38"}
+ {"current_steps": 1445, "total_steps": 4144, "loss": 0.4696, "lr": 3.29437397851878e-05, "epoch": 2.441251056635672, "percentage": 34.87, "elapsed_time": "0:58:34", "remaining_time": "1:49:24"}
+ {"current_steps": 1450, "total_steps": 4144, "loss": 0.4547, "lr": 3.2879400562806226e-05, "epoch": 2.4497041420118344, "percentage": 34.99, "elapsed_time": "0:58:47", "remaining_time": "1:49:13"}
+ {"current_steps": 1455, "total_steps": 4144, "loss": 0.4612, "lr": 3.281483280685906e-05, "epoch": 2.458157227387997, "percentage": 35.11, "elapsed_time": "0:58:59", "remaining_time": "1:49:01"}
+ {"current_steps": 1460, "total_steps": 4144, "loss": 0.4683, "lr": 3.2750037663043985e-05, "epoch": 2.4666103127641588, "percentage": 35.23, "elapsed_time": "0:59:12", "remaining_time": "1:48:50"}
+ {"current_steps": 1465, "total_steps": 4144, "loss": 0.4645, "lr": 3.2685016281093494e-05, "epoch": 2.475063398140321, "percentage": 35.35, "elapsed_time": "0:59:23", "remaining_time": "1:48:36"}
+ {"current_steps": 1470, "total_steps": 4144, "loss": 0.4456, "lr": 3.2619769814754464e-05, "epoch": 2.4835164835164836, "percentage": 35.47, "elapsed_time": "0:59:35", "remaining_time": "1:48:23"}
+ {"current_steps": 1475, "total_steps": 4144, "loss": 0.4369, "lr": 3.2554299421767715e-05, "epoch": 2.491969568892646, "percentage": 35.59, "elapsed_time": "0:59:47", "remaining_time": "1:48:11"}
+ {"current_steps": 1480, "total_steps": 4144, "loss": 0.4901, "lr": 3.2488606263847425e-05, "epoch": 2.5004226542688084, "percentage": 35.71, "elapsed_time": "0:59:59", "remaining_time": "1:47:59"}
+ {"current_steps": 1485, "total_steps": 4144, "loss": 0.4931, "lr": 3.242269150666054e-05, "epoch": 2.5088757396449703, "percentage": 35.83, "elapsed_time": "1:00:10", "remaining_time": "1:47:45"}
+ {"current_steps": 1490, "total_steps": 4144, "loss": 0.4713, "lr": 3.2356556319806106e-05, "epoch": 2.5173288250211328, "percentage": 35.96, "elapsed_time": "1:00:21", "remaining_time": "1:47:31"}
+ {"current_steps": 1495, "total_steps": 4144, "loss": 0.4524, "lr": 3.2290201876794494e-05, "epoch": 2.525781910397295, "percentage": 36.08, "elapsed_time": "1:00:34", "remaining_time": "1:47:19"}
+ {"current_steps": 1500, "total_steps": 4144, "loss": 0.4547, "lr": 3.222362935502656e-05, "epoch": 2.534234995773457, "percentage": 36.2, "elapsed_time": "1:00:45", "remaining_time": "1:47:05"}
+ {"current_steps": 1505, "total_steps": 4144, "loss": 0.4361, "lr": 3.2156839935772805e-05, "epoch": 2.5426880811496195, "percentage": 36.32, "elapsed_time": "1:01:34", "remaining_time": "1:47:57"}
+ {"current_steps": 1510, "total_steps": 4144, "loss": 0.4389, "lr": 3.2089834804152364e-05, "epoch": 2.551141166525782, "percentage": 36.44, "elapsed_time": "1:01:46", "remaining_time": "1:47:44"}
+ {"current_steps": 1515, "total_steps": 4144, "loss": 0.4626, "lr": 3.2022615149112e-05, "epoch": 2.5595942519019443, "percentage": 36.56, "elapsed_time": "1:01:58", "remaining_time": "1:47:31"}
+ {"current_steps": 1520, "total_steps": 4144, "loss": 0.4482, "lr": 3.195518216340501e-05, "epoch": 2.5680473372781067, "percentage": 36.68, "elapsed_time": "1:02:10", "remaining_time": "1:47:19"}
+ {"current_steps": 1525, "total_steps": 4144, "loss": 0.4638, "lr": 3.1887537043570044e-05, "epoch": 2.5765004226542687, "percentage": 36.8, "elapsed_time": "1:02:23", "remaining_time": "1:47:09"}
+ {"current_steps": 1530, "total_steps": 4144, "loss": 0.4592, "lr": 3.181968098990991e-05, "epoch": 2.584953508030431, "percentage": 36.92, "elapsed_time": "1:02:35", "remaining_time": "1:46:56"}
+ {"current_steps": 1535, "total_steps": 4144, "loss": 0.4691, "lr": 3.175161520647022e-05, "epoch": 2.5934065934065935, "percentage": 37.04, "elapsed_time": "1:02:47", "remaining_time": "1:46:43"}
+ {"current_steps": 1540, "total_steps": 4144, "loss": 0.4566, "lr": 3.168334090101806e-05, "epoch": 2.6018596787827555, "percentage": 37.16, "elapsed_time": "1:02:59", "remaining_time": "1:46:30"}
+ {"current_steps": 1545, "total_steps": 4144, "loss": 0.475, "lr": 3.1614859285020585e-05, "epoch": 2.610312764158918, "percentage": 37.28, "elapsed_time": "1:03:11", "remaining_time": "1:46:17"}
+ {"current_steps": 1550, "total_steps": 4144, "loss": 0.4474, "lr": 3.1546171573623445e-05, "epoch": 2.6187658495350803, "percentage": 37.4, "elapsed_time": "1:03:22", "remaining_time": "1:46:04"}
+ {"current_steps": 1555, "total_steps": 4144, "loss": 0.4463, "lr": 3.147727898562931e-05, "epoch": 2.6272189349112427, "percentage": 37.52, "elapsed_time": "1:03:34", "remaining_time": "1:45:51"}
+ {"current_steps": 1560, "total_steps": 4144, "loss": 0.4613, "lr": 3.140818274347618e-05, "epoch": 2.635672020287405, "percentage": 37.64, "elapsed_time": "1:03:46", "remaining_time": "1:45:38"}
+ {"current_steps": 1565, "total_steps": 4144, "loss": 0.4554, "lr": 3.133888407321574e-05, "epoch": 2.644125105663567, "percentage": 37.77, "elapsed_time": "1:03:59", "remaining_time": "1:45:26"}
+ {"current_steps": 1570, "total_steps": 4144, "loss": 0.4569, "lr": 3.126938420449155e-05, "epoch": 2.6525781910397295, "percentage": 37.89, "elapsed_time": "1:04:10", "remaining_time": "1:45:13"}
+ {"current_steps": 1575, "total_steps": 4144, "loss": 0.4703, "lr": 3.119968437051729e-05, "epoch": 2.661031276415892, "percentage": 38.01, "elapsed_time": "1:04:22", "remaining_time": "1:45:00"}
+ {"current_steps": 1580, "total_steps": 4144, "loss": 0.4722, "lr": 3.1129785808054836e-05, "epoch": 2.669484361792054, "percentage": 38.13, "elapsed_time": "1:04:35", "remaining_time": "1:44:48"}
+ {"current_steps": 1585, "total_steps": 4144, "loss": 0.4523, "lr": 3.105968975739232e-05, "epoch": 2.6779374471682162, "percentage": 38.25, "elapsed_time": "1:04:47", "remaining_time": "1:44:36"}
+ {"current_steps": 1590, "total_steps": 4144, "loss": 0.4853, "lr": 3.098939746232212e-05, "epoch": 2.6863905325443787, "percentage": 38.37, "elapsed_time": "1:04:58", "remaining_time": "1:44:21"}
+ {"current_steps": 1595, "total_steps": 4144, "loss": 0.4633, "lr": 3.091891017011883e-05, "epoch": 2.694843617920541, "percentage": 38.49, "elapsed_time": "1:05:09", "remaining_time": "1:44:08"}
+ {"current_steps": 1600, "total_steps": 4144, "loss": 0.4795, "lr": 3.084822913151706e-05, "epoch": 2.7032967032967035, "percentage": 38.61, "elapsed_time": "1:05:21", "remaining_time": "1:43:54"}
+ {"current_steps": 1605, "total_steps": 4144, "loss": 0.4509, "lr": 3.077735560068928e-05, "epoch": 2.711749788672866, "percentage": 38.73, "elapsed_time": "1:05:32", "remaining_time": "1:43:40"}
+ {"current_steps": 1610, "total_steps": 4144, "loss": 0.4659, "lr": 3.070629083522361e-05, "epoch": 2.720202874049028, "percentage": 38.85, "elapsed_time": "1:05:43", "remaining_time": "1:43:27"}
+ {"current_steps": 1615, "total_steps": 4144, "loss": 0.4465, "lr": 3.0635036096101396e-05, "epoch": 2.7286559594251902, "percentage": 38.97, "elapsed_time": "1:05:54", "remaining_time": "1:43:12"}
+ {"current_steps": 1620, "total_steps": 4144, "loss": 0.4854, "lr": 3.056359264767494e-05, "epoch": 2.7371090448013526, "percentage": 39.09, "elapsed_time": "1:06:06", "remaining_time": "1:42:59"}
+ {"current_steps": 1625, "total_steps": 4144, "loss": 0.445, "lr": 3.0491961757645003e-05, "epoch": 2.7455621301775146, "percentage": 39.21, "elapsed_time": "1:06:18", "remaining_time": "1:42:47"}
+ {"current_steps": 1630, "total_steps": 4144, "loss": 0.457, "lr": 3.042014469703836e-05, "epoch": 2.754015215553677, "percentage": 39.33, "elapsed_time": "1:06:31", "remaining_time": "1:42:35"}
+ {"current_steps": 1635, "total_steps": 4144, "loss": 0.4693, "lr": 3.0348142740185172e-05, "epoch": 2.7624683009298394, "percentage": 39.45, "elapsed_time": "1:06:42", "remaining_time": "1:42:21"}
+ {"current_steps": 1640, "total_steps": 4144, "loss": 0.4542, "lr": 3.027595716469647e-05, "epoch": 2.770921386306002, "percentage": 39.58, "elapsed_time": "1:06:53", "remaining_time": "1:42:07"}
+ {"current_steps": 1645, "total_steps": 4144, "loss": 0.4598, "lr": 3.02035892514414e-05, "epoch": 2.7793744716821642, "percentage": 39.7, "elapsed_time": "1:07:05", "remaining_time": "1:41:55"}
+ {"current_steps": 1650, "total_steps": 4144, "loss": 0.4427, "lr": 3.0131040284524543e-05, "epoch": 2.787827557058326, "percentage": 39.82, "elapsed_time": "1:07:17", "remaining_time": "1:41:42"}
+ {"current_steps": 1655, "total_steps": 4144, "loss": 0.4701, "lr": 3.0058311551263116e-05, "epoch": 2.7962806424344886, "percentage": 39.94, "elapsed_time": "1:07:28", "remaining_time": "1:41:28"}
+ {"current_steps": 1660, "total_steps": 4144, "loss": 0.4343, "lr": 2.9985404342164136e-05, "epoch": 2.804733727810651, "percentage": 40.06, "elapsed_time": "1:07:41", "remaining_time": "1:41:17"}
+ {"current_steps": 1665, "total_steps": 4144, "loss": 0.4955, "lr": 2.9912319950901498e-05, "epoch": 2.813186813186813, "percentage": 40.18, "elapsed_time": "1:07:54", "remaining_time": "1:41:06"}
+ {"current_steps": 1670, "total_steps": 4144, "loss": 0.4727, "lr": 2.9839059674293058e-05, "epoch": 2.8216398985629754, "percentage": 40.3, "elapsed_time": "1:08:06", "remaining_time": "1:40:53"}
+ {"current_steps": 1675, "total_steps": 4144, "loss": 0.4621, "lr": 2.97656248122776e-05, "epoch": 2.8300929839391378, "percentage": 40.42, "elapsed_time": "1:08:17", "remaining_time": "1:40:39"}
+ {"current_steps": 1680, "total_steps": 4144, "loss": 0.4515, "lr": 2.969201666789176e-05, "epoch": 2.8385460693153, "percentage": 40.54, "elapsed_time": "1:08:29", "remaining_time": "1:40:27"}
+ {"current_steps": 1685, "total_steps": 4144, "loss": 0.4374, "lr": 2.961823654724692e-05, "epoch": 2.8469991546914626, "percentage": 40.66, "elapsed_time": "1:08:42", "remaining_time": "1:40:15"}
+ {"current_steps": 1690, "total_steps": 4144, "loss": 0.443, "lr": 2.954428575950603e-05, "epoch": 2.8554522400676245, "percentage": 40.78, "elapsed_time": "1:08:53", "remaining_time": "1:40:02"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:845931ec6c1d1108766ee64a4dbdf4a5a879b06c1c20c3824a89753cb92e7152
+ size 8657
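The three lines added for training_args.bin are a Git LFS pointer (spec linked above): the repository stores only the blob's sha256 oid and byte size, while the 8657-byte file itself lives in LFS storage. A minimal sketch for checking a downloaded copy against this pointer follows; the local path "training_args.bin" is assumed for illustration.

```python
import hashlib

# Expected values copied verbatim from the LFS pointer in this commit.
EXPECTED_OID = "845931ec6c1d1108766ee64a4dbdf4a5a879b06c1c20c3824a89753cb92e7152"
EXPECTED_SIZE = 8657

def verify(path="training_args.bin"):
    # Stream the file in 1 MiB chunks so arbitrarily large blobs
    # can be hashed without loading them fully into memory.
    h = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            size += len(chunk)
    ok = h.hexdigest() == EXPECTED_OID and size == EXPECTED_SIZE
    print("OK" if ok else f"mismatch: sha256={h.hexdigest()} size={size}")
    return ok

if __name__ == "__main__":
    verify()
```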
vocab.json ADDED
The diff for this file is too large to render. See raw diff