penfever committed
Commit 593ab8f · 0 Parent(s)

Reset repository without checkpoint dirs
.gitattributes ADDED
@@ -0,0 +1,36 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,59 @@
+ ---
+ library_name: transformers
+ license: apache-2.0
+ base_model: Qwen/Qwen3-8B
+ tags:
+ - llama-factory
+ - generated_from_trainer
+ model-index:
+ - name: Qwen3-Coder-480B-codeforces-fixeps
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # Qwen3-Coder-480B-codeforces-fixeps
+
+ This model is a fine-tuned version of [Qwen/Qwen3-8B](https://huggingface.co/Qwen/Qwen3-8B) on an unknown dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 4e-05
+ - train_batch_size: 1
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 16
+ - total_train_batch_size: 16
+ - total_eval_batch_size: 128
+ - optimizer: Use OptimizerNames.ADAMW_TORCH_FUSED with betas=(0.9,0.98) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
+ - lr_scheduler_type: cosine
+ - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 7.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.56.0
+ - Pytorch 2.9.0+cu128
+ - Datasets 4.4.1
+ - Tokenizers 0.22.1
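For a quick smoke test of the checkpoint this commit uploads, a minimal loading sketch. The repo id below is an assumption taken from the `training_parameters` link in `run_summary.json`; substitute whichever namespace actually hosts this commit.

```python
# Minimal sketch: load the uploaded checkpoint with transformers.
# REPO_ID is an assumption (taken from run_summary.json); adjust as needed.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

REPO_ID = "DCAgent2/Qwen3-Coder-480B-codeforces-fixeps"

tokenizer = AutoTokenizer.from_pretrained(REPO_ID)
model = AutoModelForCausalLM.from_pretrained(
    REPO_ID,
    torch_dtype=torch.bfloat16,  # matches "dtype": "bfloat16" in config.json
    device_map="auto",
)
print(model.num_parameters())  # ~8.2B for Qwen3-8B-shaped weights
```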
added_tokens.json ADDED
@@ -0,0 +1,28 @@
+ {
+   "</think>": 151668,
+   "</tool_call>": 151658,
+   "</tool_response>": 151666,
+   "<think>": 151667,
+   "<tool_call>": 151657,
+   "<tool_response>": 151665,
+   "<|box_end|>": 151649,
+   "<|box_start|>": 151648,
+   "<|endoftext|>": 151643,
+   "<|file_sep|>": 151664,
+   "<|fim_middle|>": 151660,
+   "<|fim_pad|>": 151662,
+   "<|fim_prefix|>": 151659,
+   "<|fim_suffix|>": 151661,
+   "<|im_end|>": 151645,
+   "<|im_start|>": 151644,
+   "<|image_pad|>": 151655,
+   "<|object_ref_end|>": 151647,
+   "<|object_ref_start|>": 151646,
+   "<|quad_end|>": 151651,
+   "<|quad_start|>": 151650,
+   "<|repo_name|>": 151663,
+   "<|video_pad|>": 151656,
+   "<|vision_end|>": 151653,
+   "<|vision_pad|>": 151654,
+   "<|vision_start|>": 151652
+ }
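A small consistency check on the file above, assuming a local clone of this repo at `./checkpoint` (hypothetical path): every added token should round-trip through the tokenizer as exactly its listed id.

```python
# Sketch: verify each added token encodes to exactly its listed id.
# "./checkpoint" is a hypothetical local clone of this repository.
import json
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")
with open("./checkpoint/added_tokens.json") as f:
    added = json.load(f)

for token, expected_id in added.items():
    ids = tok.encode(token, add_special_tokens=False)
    assert ids == [expected_id], (token, ids)
print(f"all {len(added)} added tokens encode to their expected single ids")
```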
chat_template.jinja ADDED
@@ -0,0 +1,89 @@
+ {%- if tools %}
+     {{- '<|im_start|>system\n' }}
+     {%- if messages[0].role == 'system' %}
+         {{- messages[0].content + '\n\n' }}
+     {%- endif %}
+     {{- "# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+     {%- for tool in tools %}
+         {{- "\n" }}
+         {{- tool | tojson }}
+     {%- endfor %}
+     {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+ {%- else %}
+     {%- if messages[0].role == 'system' %}
+         {{- '<|im_start|>system\n' + messages[0].content + '<|im_end|>\n' }}
+     {%- endif %}
+ {%- endif %}
+ {%- set ns = namespace(multi_step_tool=true, last_query_index=messages|length - 1) %}
+ {%- for message in messages[::-1] %}
+     {%- set index = (messages|length - 1) - loop.index0 %}
+     {%- if ns.multi_step_tool and message.role == "user" and message.content is string and not(message.content.startswith('<tool_response>') and message.content.endswith('</tool_response>')) %}
+         {%- set ns.multi_step_tool = false %}
+         {%- set ns.last_query_index = index %}
+     {%- endif %}
+ {%- endfor %}
+ {%- for message in messages %}
+     {%- if message.content is string %}
+         {%- set content = message.content %}
+     {%- else %}
+         {%- set content = '' %}
+     {%- endif %}
+     {%- if (message.role == "user") or (message.role == "system" and not loop.first) %}
+         {{- '<|im_start|>' + message.role + '\n' + content + '<|im_end|>' + '\n' }}
+     {%- elif message.role == "assistant" %}
+         {%- set reasoning_content = '' %}
+         {%- if message.reasoning_content is string %}
+             {%- set reasoning_content = message.reasoning_content %}
+         {%- else %}
+             {%- if '</think>' in content %}
+                 {%- set reasoning_content = content.split('</think>')[0].rstrip('\n').split('<think>')[-1].lstrip('\n') %}
+                 {%- set content = content.split('</think>')[-1].lstrip('\n') %}
+             {%- endif %}
+         {%- endif %}
+         {%- if loop.index0 > ns.last_query_index %}
+             {%- if loop.last or (not loop.last and reasoning_content) %}
+                 {{- '<|im_start|>' + message.role + '\n<think>\n' + reasoning_content.strip('\n') + '\n</think>\n\n' + content.lstrip('\n') }}
+             {%- else %}
+                 {{- '<|im_start|>' + message.role + '\n' + content }}
+             {%- endif %}
+         {%- else %}
+             {{- '<|im_start|>' + message.role + '\n' + content }}
+         {%- endif %}
+         {%- if message.tool_calls %}
+             {%- for tool_call in message.tool_calls %}
+                 {%- if (loop.first and content) or (not loop.first) %}
+                     {{- '\n' }}
+                 {%- endif %}
+                 {%- if tool_call.function %}
+                     {%- set tool_call = tool_call.function %}
+                 {%- endif %}
+                 {{- '<tool_call>\n{"name": "' }}
+                 {{- tool_call.name }}
+                 {{- '", "arguments": ' }}
+                 {%- if tool_call.arguments is string %}
+                     {{- tool_call.arguments }}
+                 {%- else %}
+                     {{- tool_call.arguments | tojson }}
+                 {%- endif %}
+                 {{- '}\n</tool_call>' }}
+             {%- endfor %}
+         {%- endif %}
+         {{- '<|im_end|>\n' }}
+     {%- elif message.role == "tool" %}
+         {%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
+             {{- '<|im_start|>user' }}
+         {%- endif %}
+         {{- '\n<tool_response>\n' }}
+         {{- content }}
+         {{- '\n</tool_response>' }}
+         {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+             {{- '<|im_end|>\n' }}
+         {%- endif %}
+     {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+     {{- '<|im_start|>assistant\n' }}
+     {%- if enable_thinking is defined and enable_thinking is false %}
+         {{- '<think>\n\n</think>\n\n' }}
+     {%- endif %}
+ {%- endif %}
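The final branch of the template is the part worth calling out: when the caller passes `enable_thinking=False`, the generation prompt is pre-filled with an empty `<think></think>` block so the model answers directly instead of reasoning first. A sketch of that switch, with a hypothetical local path:

```python
# Sketch: exercise the template's enable_thinking switch.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # hypothetical local clone
messages = [{"role": "user", "content": "Reverse a string in Python."}]

prompt = tok.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True, enable_thinking=False
)
# Per the add_generation_prompt branch at the end of the template, the
# prompt now ends with an empty think block, so the model skips reasoning.
assert prompt.endswith("<think>\n\n</think>\n\n")
```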
config.json ADDED
@@ -0,0 +1,68 @@
+ {
+   "architectures": [
+     "Qwen3ForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "dtype": "bfloat16",
+   "eos_token_id": 151645,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 12288,
+   "layer_types": [
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention"
+   ],
+   "max_position_embeddings": 40960,
+   "max_window_layers": 36,
+   "model_type": "qwen3",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 36,
+   "num_key_value_heads": 8,
+   "pad_token_id": 151643,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": null,
+   "rope_theta": 1000000,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "transformers_version": "4.56.0",
+   "use_cache": false,
+   "use_sliding_window": false,
+   "vocab_size": 151936
+ }
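These shapes pin down the parameter count exactly, and it cross-checks against the shard index further down. A back-of-envelope sketch using only values from this config (untied embeddings, GQA with 32 query / 8 KV heads at head_dim 128):

```python
# Parameter count implied by config.json above (no assumptions beyond it).
hidden, inter, layers = 4096, 12288, 36
heads, kv_heads, head_dim = 32, 8, 128
vocab = 151936

attn = hidden * heads * head_dim           # q_proj
attn += 2 * hidden * kv_heads * head_dim   # k_proj + v_proj
attn += heads * head_dim * hidden          # o_proj
attn += 2 * head_dim                       # q_norm + k_norm
mlp = 3 * hidden * inter                   # gate_proj + up_proj + down_proj
norms = 2 * hidden                         # input + post-attention layernorms

total = layers * (attn + mlp + norms)
total += 2 * vocab * hidden                # embed_tokens + untied lm_head
total += hidden                            # final model.norm

print(total)      # 8190735360
print(total * 2)  # 16381470720 bytes in bf16 -- exactly the "total_size"
                  # reported in model.safetensors.index.json below.
```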
generation_config.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "do_sample": true,
+   "eos_token_id": [
+     151645,
+     151643
+   ],
+   "pad_token_id": 151643,
+   "temperature": 0.6,
+   "top_k": 20,
+   "top_p": 0.95,
+   "transformers_version": "4.56.0"
+ }
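These sampling defaults (which match Qwen3's published recommendations for thinking mode) are picked up automatically by `generate()` once the checkpoint is loaded; nothing needs to be passed explicitly. A sketch, again with a hypothetical local path:

```python
# Sketch: generate() reads generation_config.json automatically.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # hypothetical local clone
model = AutoModelForCausalLM.from_pretrained(
    "./checkpoint", torch_dtype=torch.bfloat16, device_map="auto"
)
print(model.generation_config)  # do_sample=True, temperature=0.6, top_k=20, top_p=0.95

inputs = tok.apply_chat_template(
    [{"role": "user", "content": "Print the first 10 primes."}],
    add_generation_prompt=True,
    return_tensors="pt",
).to(model.device)
out = model.generate(inputs, max_new_tokens=256)
print(tok.decode(out[0][inputs.shape[-1]:], skip_special_tokens=True))
```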
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c42d7dc0f2119a059513031e42f9af12dddfa2fdce142d4e8deb9fa8d47e986
+ size 4902257696
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6af9988738698b48a14a511acba89fcab34063ef506eec53c500613d4a1aa88
+ size 4915960368
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e93b7d28fa4b3c4608ab50b8546ed357871bfdcfbb3e91ea366f72670df22482
+ size 4983068496
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a815c478d0a8de4d1fb2143c778acf38eb4efbd53eb843634df1e738756fa89b
+ size 1580230264
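The four `.safetensors` entries above are Git LFS pointer files, not the weights themselves: each records only the spec version, a sha256 object id, and the payload size. The sizes sum to 16,381,516,824 bytes, slightly above the 16,381,470,720 reported in the index below because each shard also carries its own safetensors header. A tiny parser sketch:

```python
# Sketch: parse a Git LFS pointer file into its three fields.
def parse_lfs_pointer(path: str) -> dict[str, str]:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

ptr = parse_lfs_pointer("model-00001-of-00004.safetensors")
assert ptr["version"] == "https://git-lfs.github.com/spec/v1"
print(ptr["oid"], int(ptr["size"]))  # sha256:6c42..., 4902257696
```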
model.safetensors.index.json ADDED
@@ -0,0 +1,407 @@
+ {
+   "metadata": {
+     "total_parameters": 308224,
+     "total_size": 16381470720
+   },
+   "weight_map": {
+     "lm_head.weight": "model-00004-of-00004.safetensors",
+     "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.20.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.20.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.20.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.20.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.20.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.21.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.21.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.21.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.21.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.21.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.22.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.28.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.28.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.29.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.29.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.29.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.30.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.30.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.31.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.31.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.31.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.31.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.32.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.32.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.32.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.32.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.32.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.32.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.32.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.32.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.32.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.33.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.33.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.33.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.33.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.33.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.33.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.33.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.33.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.33.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.33.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.33.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.34.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.34.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.34.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.34.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.34.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.34.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.34.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.34.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.34.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.34.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.34.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.35.input_layernorm.weight": "model-00004-of-00004.safetensors",
+     "model.layers.35.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.35.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.35.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.35.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
+     "model.layers.35.self_attn.k_norm.weight": "model-00004-of-00004.safetensors",
+     "model.layers.35.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
+     "model.layers.35.self_attn.q_norm.weight": "model-00004-of-00004.safetensors",
+     "model.layers.35.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.35.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.9.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.9.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.norm.weight": "model-00004-of-00004.safetensors"
+   }
+ }
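The weight_map is what lets loaders fetch a single tensor without touching the other shards; note how layers 9, 22, and 35 straddle shard boundaries (e.g. layer 22's attention lives in shard 2 while its MLP lives in shard 3). A lazy-load sketch with the safetensors library, local paths hypothetical:

```python
# Sketch: resolve a tensor's shard via the index, then load just that tensor.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.22.self_attn.k_proj.weight"
shard = index["weight_map"][name]  # "model-00002-of-00004.safetensors"

with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(tensor.shape)  # (1024, 4096): kv_heads * head_dim rows, hidden_size cols
```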
run_summary.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "agent_name": null,
+   "training_start": null,
+   "training_end": null,
+   "created_by": "DCAgent",
+   "base_model_name": "Qwen/Qwen3-8B",
+   "dataset_name": "penfever/Qwen3-Coder-480B-codeforces",
+   "training_type": "SFT",
+   "training_parameters": "https://huggingface.co/DCAgent2/Qwen3-Coder-480B-codeforces-fixeps/blob/main/config.json",
+   "wandb_link": "https://wandb.ai/dogml/dc-agent/runs/Qwen3-Coder-480B-codeforces-fixeps_Qwen3-8B",
+   "traces_location_s3": null
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<|object_ref_start|>",
+     "<|object_ref_end|>",
+     "<|box_start|>",
+     "<|box_end|>",
+     "<|quad_start|>",
+     "<|quad_end|>",
+     "<|vision_start|>",
+     "<|vision_end|>",
+     "<|vision_pad|>",
+     "<|image_pad|>",
+     "<|video_pad|>"
+   ],
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
+ size 11422654
tokenizer_config.json ADDED
@@ -0,0 +1,240 @@
+ {
+   "add_bos_token": false,
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "151643": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151644": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151645": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151646": {
+       "content": "<|object_ref_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151647": {
+       "content": "<|object_ref_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151648": {
+       "content": "<|box_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151649": {
+       "content": "<|box_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151650": {
+       "content": "<|quad_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151651": {
+       "content": "<|quad_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151652": {
+       "content": "<|vision_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151653": {
+       "content": "<|vision_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151654": {
+       "content": "<|vision_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151655": {
+       "content": "<|image_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151656": {
+       "content": "<|video_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151657": {
+       "content": "<tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151658": {
+       "content": "</tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151659": {
+       "content": "<|fim_prefix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151660": {
+       "content": "<|fim_middle|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151661": {
+       "content": "<|fim_suffix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151662": {
+       "content": "<|fim_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151663": {
+       "content": "<|repo_name|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151664": {
+       "content": "<|file_sep|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151665": {
+       "content": "<tool_response>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151666": {
+       "content": "</tool_response>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151667": {
+       "content": "<think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151668": {
+       "content": "</think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<|object_ref_start|>",
+     "<|object_ref_end|>",
+     "<|box_start|>",
+     "<|box_end|>",
+     "<|quad_start|>",
+     "<|quad_end|>",
+     "<|vision_start|>",
+     "<|vision_end|>",
+     "<|vision_pad|>",
+     "<|image_pad|>",
+     "<|video_pad|>"
+   ],
+   "bos_token": null,
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|im_end|>",
+   "errors": "replace",
+   "extra_special_tokens": {},
+   "model_max_length": 32768,
+   "pad_token": "<|endoftext|>",
+   "padding_side": "right",
+   "split_special_tokens": false,
+   "tokenizer_class": "Qwen2Tokenizer",
+   "unk_token": null
+ }
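Two settings here are worth cross-checking against the other files in this commit: eos/pad match special_tokens_map.json, and model_max_length (32768) sits below the 40960 max_position_embeddings in config.json. A sanity-check sketch, local path hypothetical:

```python
# Sketch: cross-check tokenizer settings against the model config.
from transformers import AutoConfig, AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # hypothetical local clone
cfg = AutoConfig.from_pretrained("./checkpoint")

assert tok.eos_token == "<|im_end|>" and tok.pad_token == "<|endoftext|>"
assert tok.model_max_length <= cfg.max_position_embeddings  # 32768 <= 40960
print(tok.padding_side)  # "right", as set for this SFT run
```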
trainer_log.jsonl ADDED
@@ -0,0 +1,352 @@
+ {"current_steps": 5, "total_steps": 1757, "loss": 0.5643, "lr": 9.090909090909091e-07, "epoch": 0.0199203187250996, "percentage": 0.28, "elapsed_time": "0:00:34", "remaining_time": "3:20:43"}
+ {"current_steps": 10, "total_steps": 1757, "loss": 0.5577, "lr": 2.0454545454545457e-06, "epoch": 0.0398406374501992, "percentage": 0.57, "elapsed_time": "0:00:53", "remaining_time": "2:37:05"}
+ {"current_steps": 15, "total_steps": 1757, "loss": 0.4788, "lr": 3.181818181818182e-06, "epoch": 0.05976095617529881, "percentage": 0.85, "elapsed_time": "0:01:19", "remaining_time": "2:33:16"}
+ {"current_steps": 20, "total_steps": 1757, "loss": 0.4533, "lr": 4.3181818181818185e-06, "epoch": 0.0796812749003984, "percentage": 1.14, "elapsed_time": "0:01:42", "remaining_time": "2:28:58"}
+ {"current_steps": 25, "total_steps": 1757, "loss": 0.4295, "lr": 5.4545454545454545e-06, "epoch": 0.099601593625498, "percentage": 1.42, "elapsed_time": "0:02:04", "remaining_time": "2:23:50"}
+ {"current_steps": 30, "total_steps": 1757, "loss": 0.4162, "lr": 6.590909090909091e-06, "epoch": 0.11952191235059761, "percentage": 1.71, "elapsed_time": "0:02:25", "remaining_time": "2:19:39"}
+ {"current_steps": 35, "total_steps": 1757, "loss": 0.4118, "lr": 7.727272727272727e-06, "epoch": 0.1394422310756972, "percentage": 1.99, "elapsed_time": "0:02:47", "remaining_time": "2:17:15"}
+ {"current_steps": 40, "total_steps": 1757, "loss": 0.4114, "lr": 8.863636363636365e-06, "epoch": 0.1593625498007968, "percentage": 2.28, "elapsed_time": "0:03:05", "remaining_time": "2:12:57"}
+ {"current_steps": 45, "total_steps": 1757, "loss": 0.3755, "lr": 1e-05, "epoch": 0.17928286852589642, "percentage": 2.56, "elapsed_time": "0:03:30", "remaining_time": "2:13:28"}
+ {"current_steps": 50, "total_steps": 1757, "loss": 0.373, "lr": 1.1136363636363637e-05, "epoch": 0.199203187250996, "percentage": 2.85, "elapsed_time": "0:03:55", "remaining_time": "2:13:46"}
+ {"current_steps": 55, "total_steps": 1757, "loss": 0.3583, "lr": 1.2272727272727274e-05, "epoch": 0.21912350597609562, "percentage": 3.13, "elapsed_time": "0:04:16", "remaining_time": "2:12:19"}
+ {"current_steps": 60, "total_steps": 1757, "loss": 0.3168, "lr": 1.3409090909090909e-05, "epoch": 0.23904382470119523, "percentage": 3.41, "elapsed_time": "0:04:40", "remaining_time": "2:12:19"}
+ {"current_steps": 65, "total_steps": 1757, "loss": 0.286, "lr": 1.4545454545454546e-05, "epoch": 0.2589641434262948, "percentage": 3.7, "elapsed_time": "0:05:00", "remaining_time": "2:10:14"}
+ {"current_steps": 70, "total_steps": 1757, "loss": 0.3046, "lr": 1.5681818181818182e-05, "epoch": 0.2788844621513944, "percentage": 3.98, "elapsed_time": "0:05:25", "remaining_time": "2:10:38"}
+ {"current_steps": 75, "total_steps": 1757, "loss": 0.307, "lr": 1.681818181818182e-05, "epoch": 0.29880478087649404, "percentage": 4.27, "elapsed_time": "0:05:48", "remaining_time": "2:10:05"}
+ {"current_steps": 80, "total_steps": 1757, "loss": 0.2744, "lr": 1.7954545454545454e-05, "epoch": 0.3187250996015936, "percentage": 4.55, "elapsed_time": "0:06:13", "remaining_time": "2:10:25"}
+ {"current_steps": 85, "total_steps": 1757, "loss": 0.2952, "lr": 1.9090909090909094e-05, "epoch": 0.3386454183266932, "percentage": 4.84, "elapsed_time": "0:06:39", "remaining_time": "2:11:03"}
+ {"current_steps": 90, "total_steps": 1757, "loss": 0.2609, "lr": 2.022727272727273e-05, "epoch": 0.35856573705179284, "percentage": 5.12, "elapsed_time": "0:07:02", "remaining_time": "2:10:21"}
+ {"current_steps": 95, "total_steps": 1757, "loss": 0.3032, "lr": 2.1363636363636365e-05, "epoch": 0.3784860557768924, "percentage": 5.41, "elapsed_time": "0:07:25", "remaining_time": "2:09:56"}
+ {"current_steps": 100, "total_steps": 1757, "loss": 0.2917, "lr": 2.25e-05, "epoch": 0.398406374501992, "percentage": 5.69, "elapsed_time": "0:07:46", "remaining_time": "2:08:49"}
+ {"current_steps": 105, "total_steps": 1757, "loss": 0.2818, "lr": 2.363636363636364e-05, "epoch": 0.41832669322709165, "percentage": 5.98, "elapsed_time": "0:08:09", "remaining_time": "2:08:22"}
+ {"current_steps": 110, "total_steps": 1757, "loss": 0.2923, "lr": 2.4772727272727273e-05, "epoch": 0.43824701195219123, "percentage": 6.26, "elapsed_time": "0:08:30", "remaining_time": "2:07:26"}
+ {"current_steps": 115, "total_steps": 1757, "loss": 0.2848, "lr": 2.590909090909091e-05, "epoch": 0.4581673306772908, "percentage": 6.55, "elapsed_time": "0:08:50", "remaining_time": "2:06:20"}
+ {"current_steps": 120, "total_steps": 1757, "loss": 0.2425, "lr": 2.704545454545455e-05, "epoch": 0.47808764940239046, "percentage": 6.83, "elapsed_time": "0:09:12", "remaining_time": "2:05:30"}
+ {"current_steps": 125, "total_steps": 1757, "loss": 0.291, "lr": 2.8181818181818185e-05, "epoch": 0.49800796812749004, "percentage": 7.11, "elapsed_time": "0:09:31", "remaining_time": "2:04:22"}
+ {"current_steps": 130, "total_steps": 1757, "loss": 0.2721, "lr": 2.931818181818182e-05, "epoch": 0.5179282868525896, "percentage": 7.4, "elapsed_time": "0:09:54", "remaining_time": "2:03:54"}
+ {"current_steps": 135, "total_steps": 1757, "loss": 0.2541, "lr": 3.0454545454545456e-05, "epoch": 0.5378486055776892, "percentage": 7.68, "elapsed_time": "0:10:16", "remaining_time": "2:03:28"}
+ {"current_steps": 140, "total_steps": 1757, "loss": 0.2869, "lr": 3.159090909090909e-05, "epoch": 0.5577689243027888, "percentage": 7.97, "elapsed_time": "0:10:43", "remaining_time": "2:03:52"}
+ {"current_steps": 145, "total_steps": 1757, "loss": 0.2876, "lr": 3.272727272727273e-05, "epoch": 0.5776892430278885, "percentage": 8.25, "elapsed_time": "0:11:04", "remaining_time": "2:03:08"}
+ {"current_steps": 150, "total_steps": 1757, "loss": 0.2528, "lr": 3.3863636363636364e-05, "epoch": 0.5976095617529881, "percentage": 8.54, "elapsed_time": "0:11:30", "remaining_time": "2:03:12"}
31
+ {"current_steps": 155, "total_steps": 1757, "loss": 0.2367, "lr": 3.5000000000000004e-05, "epoch": 0.6175298804780877, "percentage": 8.82, "elapsed_time": "0:11:51", "remaining_time": "2:02:38"}
32
+ {"current_steps": 160, "total_steps": 1757, "loss": 0.2819, "lr": 3.613636363636364e-05, "epoch": 0.6374501992031872, "percentage": 9.11, "elapsed_time": "0:12:16", "remaining_time": "2:02:33"}
33
+ {"current_steps": 165, "total_steps": 1757, "loss": 0.2595, "lr": 3.7272727272727276e-05, "epoch": 0.6573705179282868, "percentage": 9.39, "elapsed_time": "0:12:37", "remaining_time": "2:01:50"}
34
+ {"current_steps": 170, "total_steps": 1757, "loss": 0.2823, "lr": 3.840909090909091e-05, "epoch": 0.6772908366533864, "percentage": 9.68, "elapsed_time": "0:12:56", "remaining_time": "2:00:52"}
35
+ {"current_steps": 175, "total_steps": 1757, "loss": 0.2811, "lr": 3.954545454545455e-05, "epoch": 0.6972111553784861, "percentage": 9.96, "elapsed_time": "0:13:17", "remaining_time": "2:00:11"}
36
+ {"current_steps": 180, "total_steps": 1757, "loss": 0.244, "lr": 3.9999644632891305e-05, "epoch": 0.7171314741035857, "percentage": 10.24, "elapsed_time": "0:13:38", "remaining_time": "1:59:33"}
37
+ {"current_steps": 185, "total_steps": 1757, "loss": 0.2311, "lr": 3.999747299073774e-05, "epoch": 0.7370517928286853, "percentage": 10.53, "elapsed_time": "0:13:58", "remaining_time": "1:58:48"}
38
+ {"current_steps": 190, "total_steps": 1757, "loss": 0.2481, "lr": 3.999332734671065e-05, "epoch": 0.7569721115537849, "percentage": 10.81, "elapsed_time": "0:14:19", "remaining_time": "1:58:05"}
39
+ {"current_steps": 195, "total_steps": 1757, "loss": 0.2492, "lr": 3.998720811003718e-05, "epoch": 0.7768924302788844, "percentage": 11.1, "elapsed_time": "0:14:43", "remaining_time": "1:57:58"}
40
+ {"current_steps": 200, "total_steps": 1757, "loss": 0.2581, "lr": 3.99791158847629e-05, "epoch": 0.796812749003984, "percentage": 11.38, "elapsed_time": "0:15:08", "remaining_time": "1:57:51"}
41
+ {"current_steps": 205, "total_steps": 1757, "loss": 0.2405, "lr": 3.996905146969211e-05, "epoch": 0.8167330677290837, "percentage": 11.67, "elapsed_time": "0:16:26", "remaining_time": "2:04:27"}
42
+ {"current_steps": 210, "total_steps": 1757, "loss": 0.2675, "lr": 3.995701585830907e-05, "epoch": 0.8366533864541833, "percentage": 11.95, "elapsed_time": "0:16:46", "remaining_time": "2:03:35"}
43
+ {"current_steps": 215, "total_steps": 1757, "loss": 0.2642, "lr": 3.9943010238679845e-05, "epoch": 0.8565737051792829, "percentage": 12.24, "elapsed_time": "0:17:07", "remaining_time": "2:02:52"}
44
+ {"current_steps": 220, "total_steps": 1757, "loss": 0.265, "lr": 3.9927035993335095e-05, "epoch": 0.8764940239043825, "percentage": 12.52, "elapsed_time": "0:17:30", "remaining_time": "2:02:16"}
45
+ {"current_steps": 225, "total_steps": 1757, "loss": 0.2441, "lr": 3.9909094699133565e-05, "epoch": 0.896414342629482, "percentage": 12.81, "elapsed_time": "0:17:52", "remaining_time": "2:01:45"}
46
+ {"current_steps": 230, "total_steps": 1757, "loss": 0.2582, "lr": 3.988918812710646e-05, "epoch": 0.9163346613545816, "percentage": 13.09, "elapsed_time": "0:18:16", "remaining_time": "2:01:22"}
47
+ {"current_steps": 235, "total_steps": 1757, "loss": 0.2709, "lr": 3.9867318242282566e-05, "epoch": 0.9362549800796812, "percentage": 13.38, "elapsed_time": "0:18:41", "remaining_time": "2:01:01"}
48
+ {"current_steps": 240, "total_steps": 1757, "loss": 0.2492, "lr": 3.984348720349434e-05, "epoch": 0.9561752988047809, "percentage": 13.66, "elapsed_time": "0:19:05", "remaining_time": "2:00:40"}
49
+ {"current_steps": 245, "total_steps": 1757, "loss": 0.2565, "lr": 3.981769736316478e-05, "epoch": 0.9760956175298805, "percentage": 13.94, "elapsed_time": "0:19:26", "remaining_time": "1:59:58"}
50
+ {"current_steps": 250, "total_steps": 1757, "loss": 0.2575, "lr": 3.97899512670752e-05, "epoch": 0.9960159362549801, "percentage": 14.23, "elapsed_time": "0:19:45", "remaining_time": "1:59:05"}
51
+ {"current_steps": 255, "total_steps": 1757, "loss": 0.2512, "lr": 3.9760251654113935e-05, "epoch": 1.0159362549800797, "percentage": 14.51, "elapsed_time": "0:20:07", "remaining_time": "1:58:33"}
52
+ {"current_steps": 260, "total_steps": 1757, "loss": 0.2207, "lr": 3.9728601456005995e-05, "epoch": 1.0358565737051793, "percentage": 14.8, "elapsed_time": "0:20:29", "remaining_time": "1:58:00"}
53
+ {"current_steps": 265, "total_steps": 1757, "loss": 0.23, "lr": 3.969500379702365e-05, "epoch": 1.0557768924302788, "percentage": 15.08, "elapsed_time": "0:20:51", "remaining_time": "1:57:26"}
54
+ {"current_steps": 270, "total_steps": 1757, "loss": 0.2333, "lr": 3.965946199367804e-05, "epoch": 1.0756972111553784, "percentage": 15.37, "elapsed_time": "0:21:15", "remaining_time": "1:57:02"}
55
+ {"current_steps": 275, "total_steps": 1757, "loss": 0.2455, "lr": 3.962197955439177e-05, "epoch": 1.095617529880478, "percentage": 15.65, "elapsed_time": "0:21:46", "remaining_time": "1:57:22"}
56
+ {"current_steps": 280, "total_steps": 1757, "loss": 0.2278, "lr": 3.9582560179152605e-05, "epoch": 1.1155378486055776, "percentage": 15.94, "elapsed_time": "0:22:09", "remaining_time": "1:56:51"}
57
+ {"current_steps": 285, "total_steps": 1757, "loss": 0.2225, "lr": 3.9541207759148216e-05, "epoch": 1.1354581673306772, "percentage": 16.22, "elapsed_time": "0:22:29", "remaining_time": "1:56:12"}
58
+ {"current_steps": 290, "total_steps": 1757, "loss": 0.227, "lr": 3.949792637638212e-05, "epoch": 1.155378486055777, "percentage": 16.51, "elapsed_time": "0:22:49", "remaining_time": "1:55:26"}
59
+ {"current_steps": 295, "total_steps": 1757, "loss": 0.2498, "lr": 3.945272030327064e-05, "epoch": 1.1752988047808766, "percentage": 16.79, "elapsed_time": "0:23:09", "remaining_time": "1:54:47"}
60
+ {"current_steps": 300, "total_steps": 1757, "loss": 0.2578, "lr": 3.9405594002221284e-05, "epoch": 1.1952191235059761, "percentage": 17.07, "elapsed_time": "0:23:29", "remaining_time": "1:54:06"}
61
+ {"current_steps": 305, "total_steps": 1757, "loss": 0.2291, "lr": 3.935655212519214e-05, "epoch": 1.2151394422310757, "percentage": 17.36, "elapsed_time": "0:23:49", "remaining_time": "1:53:25"}
62
+ {"current_steps": 310, "total_steps": 1757, "loss": 0.2503, "lr": 3.930559951323273e-05, "epoch": 1.2350597609561753, "percentage": 17.64, "elapsed_time": "0:24:10", "remaining_time": "1:52:49"}
63
+ {"current_steps": 315, "total_steps": 1757, "loss": 0.2438, "lr": 3.925274119600614e-05, "epoch": 1.254980079681275, "percentage": 17.93, "elapsed_time": "0:24:30", "remaining_time": "1:52:12"}
64
+ {"current_steps": 320, "total_steps": 1757, "loss": 0.2094, "lr": 3.9197982391292505e-05, "epoch": 1.2749003984063745, "percentage": 18.21, "elapsed_time": "0:24:50", "remaining_time": "1:51:33"}
65
+ {"current_steps": 325, "total_steps": 1757, "loss": 0.2225, "lr": 3.914132850447393e-05, "epoch": 1.294820717131474, "percentage": 18.5, "elapsed_time": "0:25:12", "remaining_time": "1:51:04"}
66
+ {"current_steps": 330, "total_steps": 1757, "loss": 0.2305, "lr": 3.908278512800098e-05, "epoch": 1.3147410358565736, "percentage": 18.78, "elapsed_time": "0:25:33", "remaining_time": "1:50:30"}
67
+ {"current_steps": 335, "total_steps": 1757, "loss": 0.2368, "lr": 3.902235804084057e-05, "epoch": 1.3346613545816732, "percentage": 19.07, "elapsed_time": "0:25:53", "remaining_time": "1:49:54"}
68
+ {"current_steps": 340, "total_steps": 1757, "loss": 0.2123, "lr": 3.896005320790552e-05, "epoch": 1.354581673306773, "percentage": 19.35, "elapsed_time": "0:26:18", "remaining_time": "1:49:39"}
69
+ {"current_steps": 345, "total_steps": 1757, "loss": 0.2302, "lr": 3.889587677946578e-05, "epoch": 1.3745019920318726, "percentage": 19.64, "elapsed_time": "0:26:42", "remaining_time": "1:49:20"}
70
+ {"current_steps": 350, "total_steps": 1757, "loss": 0.2667, "lr": 3.882983509054128e-05, "epoch": 1.3944223107569722, "percentage": 19.92, "elapsed_time": "0:27:03", "remaining_time": "1:48:46"}
71
+ {"current_steps": 355, "total_steps": 1757, "loss": 0.2304, "lr": 3.876193466027658e-05, "epoch": 1.4143426294820718, "percentage": 20.2, "elapsed_time": "0:27:27", "remaining_time": "1:48:27"}
72
+ {"current_steps": 360, "total_steps": 1757, "loss": 0.2001, "lr": 3.8692182191297424e-05, "epoch": 1.4342629482071714, "percentage": 20.49, "elapsed_time": "0:27:46", "remaining_time": "1:47:45"}
73
+ {"current_steps": 365, "total_steps": 1757, "loss": 0.2228, "lr": 3.862058456904898e-05, "epoch": 1.454183266932271, "percentage": 20.77, "elapsed_time": "0:28:06", "remaining_time": "1:47:12"}
74
+ {"current_steps": 370, "total_steps": 1757, "loss": 0.2226, "lr": 3.8547148861116287e-05, "epoch": 1.4741035856573705, "percentage": 21.06, "elapsed_time": "0:28:28", "remaining_time": "1:46:42"}
75
+ {"current_steps": 375, "total_steps": 1757, "loss": 0.2559, "lr": 3.8471882316526506e-05, "epoch": 1.4940239043824701, "percentage": 21.34, "elapsed_time": "0:28:50", "remaining_time": "1:46:18"}
76
+ {"current_steps": 380, "total_steps": 1757, "loss": 0.2003, "lr": 3.839479236503339e-05, "epoch": 1.5139442231075697, "percentage": 21.63, "elapsed_time": "0:29:11", "remaining_time": "1:45:47"}
77
+ {"current_steps": 385, "total_steps": 1757, "loss": 0.2303, "lr": 3.831588661638387e-05, "epoch": 1.5338645418326693, "percentage": 21.91, "elapsed_time": "0:29:31", "remaining_time": "1:45:13"}
78
+ {"current_steps": 390, "total_steps": 1757, "loss": 0.2333, "lr": 3.8235172859566866e-05, "epoch": 1.5537848605577689, "percentage": 22.2, "elapsed_time": "0:29:55", "remaining_time": "1:44:52"}
79
+ {"current_steps": 395, "total_steps": 1757, "loss": 0.2344, "lr": 3.815265906204444e-05, "epoch": 1.5737051792828685, "percentage": 22.48, "elapsed_time": "0:30:17", "remaining_time": "1:44:26"}
80
+ {"current_steps": 400, "total_steps": 1757, "loss": 0.2046, "lr": 3.8068353368965276e-05, "epoch": 1.593625498007968, "percentage": 22.77, "elapsed_time": "0:30:41", "remaining_time": "1:44:07"}
81
+ {"current_steps": 405, "total_steps": 1757, "loss": 0.204, "lr": 3.7982264102360664e-05, "epoch": 1.6135458167330676, "percentage": 23.05, "elapsed_time": "0:32:40", "remaining_time": "1:49:03"}
82
+ {"current_steps": 410, "total_steps": 1757, "loss": 0.2123, "lr": 3.789439976032303e-05, "epoch": 1.6334661354581672, "percentage": 23.34, "elapsed_time": "0:33:03", "remaining_time": "1:48:37"}
83
+ {"current_steps": 415, "total_steps": 1757, "loss": 0.2457, "lr": 3.7804769016167036e-05, "epoch": 1.6533864541832668, "percentage": 23.62, "elapsed_time": "0:33:28", "remaining_time": "1:48:16"}
84
+ {"current_steps": 420, "total_steps": 1757, "loss": 0.2137, "lr": 3.771338071757344e-05, "epoch": 1.6733067729083664, "percentage": 23.9, "elapsed_time": "0:33:47", "remaining_time": "1:47:35"}
85
+ {"current_steps": 425, "total_steps": 1757, "loss": 0.2633, "lr": 3.7620243885715695e-05, "epoch": 1.6932270916334662, "percentage": 24.19, "elapsed_time": "0:34:16", "remaining_time": "1:47:25"}
86
+ {"current_steps": 430, "total_steps": 1757, "loss": 0.2182, "lr": 3.7525367714369476e-05, "epoch": 1.7131474103585658, "percentage": 24.47, "elapsed_time": "0:34:40", "remaining_time": "1:47:00"}
87
+ {"current_steps": 435, "total_steps": 1757, "loss": 0.2233, "lr": 3.7428761569005106e-05, "epoch": 1.7330677290836654, "percentage": 24.76, "elapsed_time": "0:35:04", "remaining_time": "1:46:34"}
88
+ {"current_steps": 440, "total_steps": 1757, "loss": 0.2444, "lr": 3.73304349858631e-05, "epoch": 1.752988047808765, "percentage": 25.04, "elapsed_time": "0:35:32", "remaining_time": "1:46:23"}
89
+ {"current_steps": 445, "total_steps": 1757, "loss": 0.2251, "lr": 3.7230397671012786e-05, "epoch": 1.7729083665338645, "percentage": 25.33, "elapsed_time": "0:35:52", "remaining_time": "1:45:45"}
90
+ {"current_steps": 450, "total_steps": 1757, "loss": 0.2274, "lr": 3.712865949939422e-05, "epoch": 1.792828685258964, "percentage": 25.61, "elapsed_time": "0:36:11", "remaining_time": "1:45:06"}
91
+ {"current_steps": 455, "total_steps": 1757, "loss": 0.2276, "lr": 3.7025230513843384e-05, "epoch": 1.812749003984064, "percentage": 25.9, "elapsed_time": "0:36:36", "remaining_time": "1:44:45"}
92
+ {"current_steps": 460, "total_steps": 1757, "loss": 0.2132, "lr": 3.692012092410084e-05, "epoch": 1.8326693227091635, "percentage": 26.18, "elapsed_time": "0:37:00", "remaining_time": "1:44:22"}
93
+ {"current_steps": 465, "total_steps": 1757, "loss": 0.2102, "lr": 3.6813341105803916e-05, "epoch": 1.852589641434263, "percentage": 26.47, "elapsed_time": "0:37:25", "remaining_time": "1:43:58"}
94
+ {"current_steps": 470, "total_steps": 1757, "loss": 0.2131, "lr": 3.670490159946248e-05, "epoch": 1.8725099601593627, "percentage": 26.75, "elapsed_time": "0:37:47", "remaining_time": "1:43:28"}
95
+ {"current_steps": 475, "total_steps": 1757, "loss": 0.2477, "lr": 3.6594813109418455e-05, "epoch": 1.8924302788844622, "percentage": 27.03, "elapsed_time": "0:38:09", "remaining_time": "1:42:58"}
96
+ {"current_steps": 480, "total_steps": 1757, "loss": 0.2276, "lr": 3.64830865027892e-05, "epoch": 1.9123505976095618, "percentage": 27.32, "elapsed_time": "0:38:28", "remaining_time": "1:42:22"}
97
+ {"current_steps": 485, "total_steps": 1757, "loss": 0.2305, "lr": 3.636973280839474e-05, "epoch": 1.9322709163346614, "percentage": 27.6, "elapsed_time": "0:38:48", "remaining_time": "1:41:46"}
98
+ {"current_steps": 490, "total_steps": 1757, "loss": 0.2168, "lr": 3.6254763215669126e-05, "epoch": 1.952191235059761, "percentage": 27.89, "elapsed_time": "0:39:11", "remaining_time": "1:41:19"}
99
+ {"current_steps": 495, "total_steps": 1757, "loss": 0.2454, "lr": 3.6138189073555876e-05, "epoch": 1.9721115537848606, "percentage": 28.17, "elapsed_time": "0:39:33", "remaining_time": "1:40:51"}
100
+ {"current_steps": 500, "total_steps": 1757, "loss": 0.236, "lr": 3.602002188938769e-05, "epoch": 1.9920318725099602, "percentage": 28.46, "elapsed_time": "0:39:58", "remaining_time": "1:40:28"}
101
+ {"current_steps": 505, "total_steps": 1757, "loss": 0.2211, "lr": 3.590027332775055e-05, "epoch": 2.0119521912350598, "percentage": 28.74, "elapsed_time": "0:40:19", "remaining_time": "1:39:57"}
102
+ {"current_steps": 510, "total_steps": 1757, "loss": 0.2063, "lr": 3.577895520933225e-05, "epoch": 2.0318725099601593, "percentage": 29.03, "elapsed_time": "0:40:41", "remaining_time": "1:39:29"}
103
+ {"current_steps": 515, "total_steps": 1757, "loss": 0.1939, "lr": 3.5656079509755556e-05, "epoch": 2.051792828685259, "percentage": 29.31, "elapsed_time": "0:41:03", "remaining_time": "1:39:00"}
104
+ {"current_steps": 520, "total_steps": 1757, "loss": 0.1943, "lr": 3.5531658358396095e-05, "epoch": 2.0717131474103585, "percentage": 29.6, "elapsed_time": "0:41:26", "remaining_time": "1:38:34"}
105
+ {"current_steps": 525, "total_steps": 1757, "loss": 0.1988, "lr": 3.540570403718499e-05, "epoch": 2.091633466135458, "percentage": 29.88, "elapsed_time": "0:41:49", "remaining_time": "1:38:09"}
106
+ {"current_steps": 530, "total_steps": 1757, "loss": 0.2096, "lr": 3.5278228979396476e-05, "epoch": 2.1115537848605577, "percentage": 30.17, "elapsed_time": "0:42:09", "remaining_time": "1:37:35"}
107
+ {"current_steps": 535, "total_steps": 1757, "loss": 0.198, "lr": 3.514924576842064e-05, "epoch": 2.1314741035856573, "percentage": 30.45, "elapsed_time": "0:42:32", "remaining_time": "1:37:11"}
108
+ {"current_steps": 540, "total_steps": 1757, "loss": 0.2051, "lr": 3.501876713652119e-05, "epoch": 2.151394422310757, "percentage": 30.73, "elapsed_time": "0:42:55", "remaining_time": "1:36:44"}
109
+ {"current_steps": 545, "total_steps": 1757, "loss": 0.2098, "lr": 3.488680596357872e-05, "epoch": 2.1713147410358564, "percentage": 31.02, "elapsed_time": "0:43:16", "remaining_time": "1:36:14"}
110
+ {"current_steps": 550, "total_steps": 1757, "loss": 0.1895, "lr": 3.475337527581921e-05, "epoch": 2.191235059760956, "percentage": 31.3, "elapsed_time": "0:43:40", "remaining_time": "1:35:51"}
111
+ {"current_steps": 555, "total_steps": 1757, "loss": 0.2088, "lr": 3.461848824452826e-05, "epoch": 2.2111553784860556, "percentage": 31.59, "elapsed_time": "0:44:00", "remaining_time": "1:35:19"}
112
+ {"current_steps": 560, "total_steps": 1757, "loss": 0.1975, "lr": 3.448215818475084e-05, "epoch": 2.231075697211155, "percentage": 31.87, "elapsed_time": "0:44:22", "remaining_time": "1:34:51"}
113
+ {"current_steps": 565, "total_steps": 1757, "loss": 0.2106, "lr": 3.434439855397698e-05, "epoch": 2.2509960159362548, "percentage": 32.16, "elapsed_time": "0:44:47", "remaining_time": "1:34:28"}
114
+ {"current_steps": 570, "total_steps": 1757, "loss": 0.1983, "lr": 3.420522295081335e-05, "epoch": 2.2709163346613543, "percentage": 32.44, "elapsed_time": "0:45:15", "remaining_time": "1:34:14"}
115
+ {"current_steps": 575, "total_steps": 1757, "loss": 0.1974, "lr": 3.4064645113640856e-05, "epoch": 2.2908366533864544, "percentage": 32.73, "elapsed_time": "0:45:36", "remaining_time": "1:33:44"}
116
+ {"current_steps": 580, "total_steps": 1757, "loss": 0.2191, "lr": 3.392267891925854e-05, "epoch": 2.310756972111554, "percentage": 33.01, "elapsed_time": "0:46:00", "remaining_time": "1:33:22"}
117
+ {"current_steps": 585, "total_steps": 1757, "loss": 0.1886, "lr": 3.377933838151374e-05, "epoch": 2.3306772908366535, "percentage": 33.3, "elapsed_time": "0:46:19", "remaining_time": "1:32:48"}
118
+ {"current_steps": 590, "total_steps": 1757, "loss": 0.1925, "lr": 3.363463764991875e-05, "epoch": 2.350597609561753, "percentage": 33.58, "elapsed_time": "0:46:39", "remaining_time": "1:32:16"}
119
+ {"current_steps": 595, "total_steps": 1757, "loss": 0.2199, "lr": 3.348859100825411e-05, "epoch": 2.3705179282868527, "percentage": 33.86, "elapsed_time": "0:46:58", "remaining_time": "1:31:45"}
120
+ {"current_steps": 600, "total_steps": 1757, "loss": 0.1847, "lr": 3.334121287315858e-05, "epoch": 2.3904382470119523, "percentage": 34.15, "elapsed_time": "0:47:20", "remaining_time": "1:31:17"}
121
+ {"current_steps": 605, "total_steps": 1757, "loss": 0.1963, "lr": 3.3192517792706054e-05, "epoch": 2.410358565737052, "percentage": 34.43, "elapsed_time": "0:50:07", "remaining_time": "1:35:27"}
122
+ {"current_steps": 610, "total_steps": 1757, "loss": 0.2062, "lr": 3.3042520444969476e-05, "epoch": 2.4302788844621515, "percentage": 34.72, "elapsed_time": "0:50:34", "remaining_time": "1:35:06"}
123
+ {"current_steps": 615, "total_steps": 1757, "loss": 0.202, "lr": 3.289123563657195e-05, "epoch": 2.450199203187251, "percentage": 35.0, "elapsed_time": "0:50:55", "remaining_time": "1:34:34"}
124
+ {"current_steps": 620, "total_steps": 1757, "loss": 0.1999, "lr": 3.2738678301225115e-05, "epoch": 2.4701195219123506, "percentage": 35.29, "elapsed_time": "0:51:17", "remaining_time": "1:34:03"}
125
+ {"current_steps": 625, "total_steps": 1757, "loss": 0.1879, "lr": 3.2584863498255014e-05, "epoch": 2.49003984063745, "percentage": 35.57, "elapsed_time": "0:51:37", "remaining_time": "1:33:30"}
126
+ {"current_steps": 630, "total_steps": 1757, "loss": 0.1952, "lr": 3.242980641111555e-05, "epoch": 2.50996015936255, "percentage": 35.86, "elapsed_time": "0:52:00", "remaining_time": "1:33:02"}
127
+ {"current_steps": 635, "total_steps": 1757, "loss": 0.1886, "lr": 3.2273522345889674e-05, "epoch": 2.5298804780876494, "percentage": 36.14, "elapsed_time": "0:52:24", "remaining_time": "1:32:36"}
128
+ {"current_steps": 640, "total_steps": 1757, "loss": 0.2208, "lr": 3.2116026729778495e-05, "epoch": 2.549800796812749, "percentage": 36.43, "elapsed_time": "0:52:48", "remaining_time": "1:32:09"}
129
+ {"current_steps": 645, "total_steps": 1757, "loss": 0.218, "lr": 3.195733510957843e-05, "epoch": 2.5697211155378485, "percentage": 36.71, "elapsed_time": "0:53:07", "remaining_time": "1:31:35"}
130
+ {"current_steps": 650, "total_steps": 1757, "loss": 0.1976, "lr": 3.1797463150146524e-05, "epoch": 2.589641434262948, "percentage": 36.99, "elapsed_time": "0:53:26", "remaining_time": "1:31:00"}
131
+ {"current_steps": 655, "total_steps": 1757, "loss": 0.1932, "lr": 3.163642663285411e-05, "epoch": 2.6095617529880477, "percentage": 37.28, "elapsed_time": "0:53:45", "remaining_time": "1:30:26"}
132
+ {"current_steps": 660, "total_steps": 1757, "loss": 0.2091, "lr": 3.147424145402907e-05, "epoch": 2.6294820717131473, "percentage": 37.56, "elapsed_time": "0:54:09", "remaining_time": "1:30:00"}
133
+ {"current_steps": 665, "total_steps": 1757, "loss": 0.2003, "lr": 3.131092362338656e-05, "epoch": 2.649402390438247, "percentage": 37.85, "elapsed_time": "0:54:29", "remaining_time": "1:29:28"}
134
+ {"current_steps": 670, "total_steps": 1757, "loss": 0.1772, "lr": 3.114648926244873e-05, "epoch": 2.6693227091633465, "percentage": 38.13, "elapsed_time": "0:54:51", "remaining_time": "1:28:59"}
135
+ {"current_steps": 675, "total_steps": 1757, "loss": 0.194, "lr": 3.0980954602953296e-05, "epoch": 2.6892430278884465, "percentage": 38.42, "elapsed_time": "0:55:12", "remaining_time": "1:28:29"}
136
+ {"current_steps": 680, "total_steps": 1757, "loss": 0.1968, "lr": 3.081433598525127e-05, "epoch": 2.709163346613546, "percentage": 38.7, "elapsed_time": "0:55:31", "remaining_time": "1:27:57"}
137
+ {"current_steps": 685, "total_steps": 1757, "loss": 0.1961, "lr": 3.064664985669397e-05, "epoch": 2.7290836653386457, "percentage": 38.99, "elapsed_time": "0:55:54", "remaining_time": "1:27:29"}
138
+ {"current_steps": 690, "total_steps": 1757, "loss": 0.2163, "lr": 3.0477912770009407e-05, "epoch": 2.7490039840637452, "percentage": 39.27, "elapsed_time": "0:56:18", "remaining_time": "1:27:03"}
139
+ {"current_steps": 695, "total_steps": 1757, "loss": 0.1982, "lr": 3.0308141381668422e-05, "epoch": 2.768924302788845, "percentage": 39.56, "elapsed_time": "0:56:38", "remaining_time": "1:26:33"}
140
+ {"current_steps": 700, "total_steps": 1757, "loss": 0.2064, "lr": 3.0137352450240393e-05, "epoch": 2.7888446215139444, "percentage": 39.84, "elapsed_time": "0:56:59", "remaining_time": "1:26:03"}
141
+ {"current_steps": 705, "total_steps": 1757, "loss": 0.1991, "lr": 2.9965562834738992e-05, "epoch": 2.808764940239044, "percentage": 40.13, "elapsed_time": "0:57:18", "remaining_time": "1:25:30"}
142
+ {"current_steps": 710, "total_steps": 1757, "loss": 0.1882, "lr": 2.979278949295799e-05, "epoch": 2.8286852589641436, "percentage": 40.41, "elapsed_time": "0:57:42", "remaining_time": "1:25:06"}
143
+ {"current_steps": 715, "total_steps": 1757, "loss": 0.1828, "lr": 2.9619049479797303e-05, "epoch": 2.848605577689243, "percentage": 40.69, "elapsed_time": "0:58:07", "remaining_time": "1:24:42"}
144
+ {"current_steps": 720, "total_steps": 1757, "loss": 0.1934, "lr": 2.944435994557945e-05, "epoch": 2.8685258964143427, "percentage": 40.98, "elapsed_time": "0:58:30", "remaining_time": "1:24:16"}
145
+ {"current_steps": 725, "total_steps": 1757, "loss": 0.1874, "lr": 2.9268738134356624e-05, "epoch": 2.8884462151394423, "percentage": 41.26, "elapsed_time": "0:58:51", "remaining_time": "1:23:47"}
146
+ {"current_steps": 730, "total_steps": 1757, "loss": 0.1909, "lr": 2.909220138220846e-05, "epoch": 2.908366533864542, "percentage": 41.55, "elapsed_time": "0:59:10", "remaining_time": "1:23:15"}
147
+ {"current_steps": 735, "total_steps": 1757, "loss": 0.1873, "lr": 2.891476711553077e-05, "epoch": 2.9282868525896415, "percentage": 41.83, "elapsed_time": "0:59:34", "remaining_time": "1:22:49"}
148
+ {"current_steps": 740, "total_steps": 1757, "loss": 0.2124, "lr": 2.8736452849315322e-05, "epoch": 2.948207171314741, "percentage": 42.12, "elapsed_time": "0:59:55", "remaining_time": "1:22:22"}
149
+ {"current_steps": 745, "total_steps": 1757, "loss": 0.2011, "lr": 2.8557276185420892e-05, "epoch": 2.9681274900398407, "percentage": 42.4, "elapsed_time": "1:00:15", "remaining_time": "1:21:51"}
150
+ {"current_steps": 750, "total_steps": 1757, "loss": 0.217, "lr": 2.837725481083576e-05, "epoch": 2.9880478087649402, "percentage": 42.69, "elapsed_time": "1:00:37", "remaining_time": "1:21:24"}
151
+ {"current_steps": 755, "total_steps": 1757, "loss": 0.1988, "lr": 2.8196406495931753e-05, "epoch": 3.00796812749004, "percentage": 42.97, "elapsed_time": "1:01:06", "remaining_time": "1:21:05"}
152
+ {"current_steps": 760, "total_steps": 1757, "loss": 0.1712, "lr": 2.80147490927101e-05, "epoch": 3.0278884462151394, "percentage": 43.26, "elapsed_time": "1:01:27", "remaining_time": "1:20:37"}
153
+ {"current_steps": 765, "total_steps": 1757, "loss": 0.1692, "lr": 2.783230053303922e-05, "epoch": 3.047808764940239, "percentage": 43.54, "elapsed_time": "1:01:47", "remaining_time": "1:20:07"}
154
+ {"current_steps": 770, "total_steps": 1757, "loss": 0.1765, "lr": 2.7649078826884607e-05, "epoch": 3.0677290836653386, "percentage": 43.82, "elapsed_time": "1:02:08", "remaining_time": "1:19:38"}
155
+ {"current_steps": 775, "total_steps": 1757, "loss": 0.1692, "lr": 2.746510206053103e-05, "epoch": 3.087649402390438, "percentage": 44.11, "elapsed_time": "1:02:25", "remaining_time": "1:19:06"}
156
+ {"current_steps": 780, "total_steps": 1757, "loss": 0.1625, "lr": 2.72803883947972e-05, "epoch": 3.1075697211155378, "percentage": 44.39, "elapsed_time": "1:02:44", "remaining_time": "1:18:35"}
157
+ {"current_steps": 785, "total_steps": 1757, "loss": 0.1846, "lr": 2.7094956063243034e-05, "epoch": 3.1274900398406373, "percentage": 44.68, "elapsed_time": "1:03:03", "remaining_time": "1:18:04"}
158
+ {"current_steps": 790, "total_steps": 1757, "loss": 0.1717, "lr": 2.6908823370369827e-05, "epoch": 3.147410358565737, "percentage": 44.96, "elapsed_time": "1:03:22", "remaining_time": "1:17:34"}
159
+ {"current_steps": 795, "total_steps": 1757, "loss": 0.1839, "lr": 2.6722008689813303e-05, "epoch": 3.1673306772908365, "percentage": 45.25, "elapsed_time": "1:03:47", "remaining_time": "1:17:12"}
160
+ {"current_steps": 800, "total_steps": 1757, "loss": 0.1654, "lr": 2.6534530462529964e-05, "epoch": 3.187250996015936, "percentage": 45.53, "elapsed_time": "1:04:04", "remaining_time": "1:16:39"}
161
+ {"current_steps": 805, "total_steps": 1757, "loss": 0.1591, "lr": 2.6346407194976696e-05, "epoch": 3.2071713147410357, "percentage": 45.82, "elapsed_time": "1:05:30", "remaining_time": "1:17:27"}
162
+ {"current_steps": 810, "total_steps": 1757, "loss": 0.1824, "lr": 2.615765745728399e-05, "epoch": 3.2270916334661353, "percentage": 46.1, "elapsed_time": "1:05:57", "remaining_time": "1:17:06"}
163
+ {"current_steps": 815, "total_steps": 1757, "loss": 0.1681, "lr": 2.596829988142278e-05, "epoch": 3.247011952191235, "percentage": 46.39, "elapsed_time": "1:06:15", "remaining_time": "1:16:35"}
164
+ {"current_steps": 820, "total_steps": 1757, "loss": 0.1851, "lr": 2.57783531593653e-05, "epoch": 3.2669322709163344, "percentage": 46.67, "elapsed_time": "1:06:36", "remaining_time": "1:16:06"}
165
+ {"current_steps": 825, "total_steps": 1757, "loss": 0.1558, "lr": 2.5587836041239874e-05, "epoch": 3.2868525896414345, "percentage": 46.96, "elapsed_time": "1:06:57", "remaining_time": "1:15:38"}
166
+ {"current_steps": 830, "total_steps": 1757, "loss": 0.1712, "lr": 2.5396767333480128e-05, "epoch": 3.306772908366534, "percentage": 47.24, "elapsed_time": "1:07:22", "remaining_time": "1:15:15"}
167
+ {"current_steps": 835, "total_steps": 1757, "loss": 0.1749, "lr": 2.5205165896968485e-05, "epoch": 3.3266932270916336, "percentage": 47.52, "elapsed_time": "1:07:44", "remaining_time": "1:14:48"}
168
+ {"current_steps": 840, "total_steps": 1757, "loss": 0.1853, "lr": 2.5013050645174414e-05, "epoch": 3.346613545816733, "percentage": 47.81, "elapsed_time": "1:08:05", "remaining_time": "1:14:19"}
169
+ {"current_steps": 845, "total_steps": 1757, "loss": 0.1647, "lr": 2.4820440542287386e-05, "epoch": 3.366533864541833, "percentage": 48.09, "elapsed_time": "1:08:27", "remaining_time": "1:13:53"}
170
+ {"current_steps": 850, "total_steps": 1757, "loss": 0.1908, "lr": 2.4627354601344927e-05, "epoch": 3.3864541832669324, "percentage": 48.38, "elapsed_time": "1:08:55", "remaining_time": "1:13:33"}
171
+ {"current_steps": 855, "total_steps": 1757, "loss": 0.1896, "lr": 2.443381188235571e-05, "epoch": 3.406374501992032, "percentage": 48.66, "elapsed_time": "1:09:20", "remaining_time": "1:13:09"}
172
+ {"current_steps": 860, "total_steps": 1757, "loss": 0.1653, "lr": 2.4239831490418184e-05, "epoch": 3.4262948207171315, "percentage": 48.95, "elapsed_time": "1:09:39", "remaining_time": "1:12:39"}
173
+ {"current_steps": 865, "total_steps": 1757, "loss": 0.17, "lr": 2.4045432573834596e-05, "epoch": 3.446215139442231, "percentage": 49.23, "elapsed_time": "1:09:59", "remaining_time": "1:12:10"}
174
+ {"current_steps": 870, "total_steps": 1757, "loss": 0.1607, "lr": 2.385063432222084e-05, "epoch": 3.4661354581673307, "percentage": 49.52, "elapsed_time": "1:10:19", "remaining_time": "1:11:42"}
175
+ {"current_steps": 875, "total_steps": 1757, "loss": 0.1762, "lr": 2.36554559646122e-05, "epoch": 3.4860557768924303, "percentage": 49.8, "elapsed_time": "1:10:42", "remaining_time": "1:11:16"}
176
+ {"current_steps": 880, "total_steps": 1757, "loss": 0.1788, "lr": 2.3459916767565196e-05, "epoch": 3.50597609561753, "percentage": 50.09, "elapsed_time": "1:11:06", "remaining_time": "1:10:51"}
177
+ {"current_steps": 885, "total_steps": 1757, "loss": 0.1762, "lr": 2.3264036033255718e-05, "epoch": 3.5258964143426295, "percentage": 50.37, "elapsed_time": "1:11:27", "remaining_time": "1:10:24"}
178
+ {"current_steps": 890, "total_steps": 1757, "loss": 0.174, "lr": 2.3067833097573713e-05, "epoch": 3.545816733067729, "percentage": 50.65, "elapsed_time": "1:11:46", "remaining_time": "1:09:55"}
179
+ {"current_steps": 895, "total_steps": 1757, "loss": 0.1836, "lr": 2.2871327328214408e-05, "epoch": 3.5657370517928286, "percentage": 50.94, "elapsed_time": "1:12:07", "remaining_time": "1:09:28"}
180
+ {"current_steps": 900, "total_steps": 1757, "loss": 0.173, "lr": 2.2674538122766555e-05, "epoch": 3.585657370517928, "percentage": 51.22, "elapsed_time": "1:12:26", "remaining_time": "1:08:58"}
181
+ {"current_steps": 905, "total_steps": 1757, "loss": 0.1944, "lr": 2.247748490679759e-05, "epoch": 3.605577689243028, "percentage": 51.51, "elapsed_time": "1:12:48", "remaining_time": "1:08:32"}
182
+ {"current_steps": 910, "total_steps": 1757, "loss": 0.1839, "lr": 2.228018713193611e-05, "epoch": 3.6254980079681274, "percentage": 51.79, "elapsed_time": "1:13:13", "remaining_time": "1:08:09"}
183
+ {"current_steps": 915, "total_steps": 1757, "loss": 0.1755, "lr": 2.208266427395175e-05, "epoch": 3.645418326693227, "percentage": 52.08, "elapsed_time": "1:13:34", "remaining_time": "1:07:42"}
184
+ {"current_steps": 920, "total_steps": 1757, "loss": 0.1682, "lr": 2.1884935830832683e-05, "epoch": 3.6653386454183265, "percentage": 52.36, "elapsed_time": "1:13:53", "remaining_time": "1:07:13"}
185
+ {"current_steps": 925, "total_steps": 1757, "loss": 0.1906, "lr": 2.1687021320860893e-05, "epoch": 3.685258964143426, "percentage": 52.65, "elapsed_time": "1:14:15", "remaining_time": "1:06:47"}
186
+ {"current_steps": 930, "total_steps": 1757, "loss": 0.1891, "lr": 2.148894028068555e-05, "epoch": 3.705179282868526, "percentage": 52.93, "elapsed_time": "1:14:39", "remaining_time": "1:06:23"}
187
+ {"current_steps": 935, "total_steps": 1757, "loss": 0.185, "lr": 2.129071226339438e-05, "epoch": 3.7250996015936257, "percentage": 53.22, "elapsed_time": "1:15:01", "remaining_time": "1:05:57"}
188
+ {"current_steps": 940, "total_steps": 1757, "loss": 0.1774, "lr": 2.1092356836583665e-05, "epoch": 3.7450199203187253, "percentage": 53.5, "elapsed_time": "1:15:26", "remaining_time": "1:05:33"}
189
+ {"current_steps": 945, "total_steps": 1757, "loss": 0.1675, "lr": 2.089389358042655e-05, "epoch": 3.764940239043825, "percentage": 53.78, "elapsed_time": "1:15:47", "remaining_time": "1:05:07"}
190
+ {"current_steps": 950, "total_steps": 1757, "loss": 0.1846, "lr": 2.069534208574034e-05, "epoch": 3.7848605577689245, "percentage": 54.07, "elapsed_time": "1:16:07", "remaining_time": "1:04:40"}
191
+ {"current_steps": 955, "total_steps": 1757, "loss": 0.1548, "lr": 2.0496721952052558e-05, "epoch": 3.804780876494024, "percentage": 54.35, "elapsed_time": "1:16:27", "remaining_time": "1:04:12"}
192
+ {"current_steps": 960, "total_steps": 1757, "loss": 0.1805, "lr": 2.0298052785666274e-05, "epoch": 3.8247011952191237, "percentage": 54.64, "elapsed_time": "1:16:46", "remaining_time": "1:03:44"}
193
+ {"current_steps": 965, "total_steps": 1757, "loss": 0.1966, "lr": 2.0099354197724693e-05, "epoch": 3.8446215139442232, "percentage": 54.92, "elapsed_time": "1:17:05", "remaining_time": "1:03:16"}
194
+ {"current_steps": 970, "total_steps": 1757, "loss": 0.1965, "lr": 1.990064580227531e-05, "epoch": 3.864541832669323, "percentage": 55.21, "elapsed_time": "1:17:25", "remaining_time": "1:02:49"}
195
+ {"current_steps": 975, "total_steps": 1757, "loss": 0.1575, "lr": 1.970194721433373e-05, "epoch": 3.8844621513944224, "percentage": 55.49, "elapsed_time": "1:17:47", "remaining_time": "1:02:23"}
196
+ {"current_steps": 980, "total_steps": 1757, "loss": 0.153, "lr": 1.950327804794745e-05, "epoch": 3.904382470119522, "percentage": 55.78, "elapsed_time": "1:18:09", "remaining_time": "1:01:58"}
197
+ {"current_steps": 985, "total_steps": 1757, "loss": 0.1741, "lr": 1.9304657914259663e-05, "epoch": 3.9243027888446216, "percentage": 56.06, "elapsed_time": "1:18:34", "remaining_time": "1:01:34"}
198
+ {"current_steps": 990, "total_steps": 1757, "loss": 0.169, "lr": 1.910610641957345e-05, "epoch": 3.944223107569721, "percentage": 56.35, "elapsed_time": "1:18:51", "remaining_time": "1:01:05"}
199
+ {"current_steps": 995, "total_steps": 1757, "loss": 0.1733, "lr": 1.8907643163416348e-05, "epoch": 3.9641434262948207, "percentage": 56.63, "elapsed_time": "1:19:13", "remaining_time": "1:00:40"}
200
+ {"current_steps": 1000, "total_steps": 1757, "loss": 0.1836, "lr": 1.8709287736605626e-05, "epoch": 3.9840637450199203, "percentage": 56.92, "elapsed_time": "1:19:37", "remaining_time": "1:00:16"}
201
+ {"current_steps": 1005, "total_steps": 1757, "loss": 0.1618, "lr": 1.851105971931446e-05, "epoch": 4.00398406374502, "percentage": 57.2, "elapsed_time": "1:20:54", "remaining_time": "1:00:32"}
202
+ {"current_steps": 1010, "total_steps": 1757, "loss": 0.1467, "lr": 1.831297867913911e-05, "epoch": 4.0239043824701195, "percentage": 57.48, "elapsed_time": "1:21:16", "remaining_time": "1:00:06"}
203
+ {"current_steps": 1015, "total_steps": 1757, "loss": 0.1544, "lr": 1.8115064169167323e-05, "epoch": 4.043824701195219, "percentage": 57.77, "elapsed_time": "1:21:39", "remaining_time": "0:59:41"}
204
+ {"current_steps": 1020, "total_steps": 1757, "loss": 0.1397, "lr": 1.7917335726048254e-05, "epoch": 4.063745019920319, "percentage": 58.05, "elapsed_time": "1:22:00", "remaining_time": "0:59:14"}
205
+ {"current_steps": 1025, "total_steps": 1757, "loss": 0.1611, "lr": 1.7719812868063893e-05, "epoch": 4.083665338645418, "percentage": 58.34, "elapsed_time": "1:22:19", "remaining_time": "0:58:47"}
206
+ {"current_steps": 1030, "total_steps": 1757, "loss": 0.165, "lr": 1.7522515093202418e-05, "epoch": 4.103585657370518, "percentage": 58.62, "elapsed_time": "1:22:40", "remaining_time": "0:58:21"}
207
+ {"current_steps": 1035, "total_steps": 1757, "loss": 0.1346, "lr": 1.7325461877233448e-05, "epoch": 4.123505976095617, "percentage": 58.91, "elapsed_time": "1:23:04", "remaining_time": "0:57:57"}
208
+ {"current_steps": 1040, "total_steps": 1757, "loss": 0.1488, "lr": 1.7128672671785596e-05, "epoch": 4.143426294820717, "percentage": 59.19, "elapsed_time": "1:23:28", "remaining_time": "0:57:33"}
209
+ {"current_steps": 1045, "total_steps": 1757, "loss": 0.1517, "lr": 1.693216690242629e-05, "epoch": 4.163346613545817, "percentage": 59.48, "elapsed_time": "1:23:51", "remaining_time": "0:57:07"}
210
+ {"current_steps": 1050, "total_steps": 1757, "loss": 0.1508, "lr": 1.6735963966744286e-05, "epoch": 4.183266932270916, "percentage": 59.76, "elapsed_time": "1:24:10", "remaining_time": "0:56:40"}
211
+ {"current_steps": 1055, "total_steps": 1757, "loss": 0.1436, "lr": 1.6540083232434814e-05, "epoch": 4.203187250996016, "percentage": 60.05, "elapsed_time": "1:24:31", "remaining_time": "0:56:14"}
212
+ {"current_steps": 1060, "total_steps": 1757, "loss": 0.1573, "lr": 1.6344544035387806e-05, "epoch": 4.223107569721115, "percentage": 60.33, "elapsed_time": "1:24:53", "remaining_time": "0:55:49"}
213
+ {"current_steps": 1065, "total_steps": 1757, "loss": 0.1522, "lr": 1.6149365677779162e-05, "epoch": 4.243027888446215, "percentage": 60.61, "elapsed_time": "1:25:16", "remaining_time": "0:55:24"}
214
+ {"current_steps": 1070, "total_steps": 1757, "loss": 0.1333, "lr": 1.595456742616541e-05, "epoch": 4.2629482071713145, "percentage": 60.9, "elapsed_time": "1:25:37", "remaining_time": "0:54:58"}
215
+ {"current_steps": 1075, "total_steps": 1757, "loss": 0.1792, "lr": 1.5760168509581822e-05, "epoch": 4.282868525896414, "percentage": 61.18, "elapsed_time": "1:25:59", "remaining_time": "0:54:33"}
216
+ {"current_steps": 1080, "total_steps": 1757, "loss": 0.14, "lr": 1.5566188117644295e-05, "epoch": 4.302788844621514, "percentage": 61.47, "elapsed_time": "1:26:19", "remaining_time": "0:54:07"}
217
+ {"current_steps": 1085, "total_steps": 1757, "loss": 0.162, "lr": 1.537264539865508e-05, "epoch": 4.322709163346613, "percentage": 61.75, "elapsed_time": "1:26:39", "remaining_time": "0:53:40"}
218
+ {"current_steps": 1090, "total_steps": 1757, "loss": 0.1401, "lr": 1.5179559457712614e-05, "epoch": 4.342629482071713, "percentage": 62.04, "elapsed_time": "1:27:02", "remaining_time": "0:53:15"}
219
+ {"current_steps": 1095, "total_steps": 1757, "loss": 0.1482, "lr": 1.498694935482559e-05, "epoch": 4.362549800796812, "percentage": 62.32, "elapsed_time": "1:27:26", "remaining_time": "0:52:52"}
220
+ {"current_steps": 1100, "total_steps": 1757, "loss": 0.1489, "lr": 1.4794834103031516e-05, "epoch": 4.382470119521912, "percentage": 62.61, "elapsed_time": "1:27:47", "remaining_time": "0:52:25"}
221
+ {"current_steps": 1105, "total_steps": 1757, "loss": 0.1555, "lr": 1.4603232666519884e-05, "epoch": 4.402390438247012, "percentage": 62.89, "elapsed_time": "1:28:08", "remaining_time": "0:52:00"}
222
+ {"current_steps": 1110, "total_steps": 1757, "loss": 0.1534, "lr": 1.4412163958760133e-05, "epoch": 4.422310756972111, "percentage": 63.18, "elapsed_time": "1:28:32", "remaining_time": "0:51:36"}
223
+ {"current_steps": 1115, "total_steps": 1757, "loss": 0.1611, "lr": 1.4221646840634711e-05, "epoch": 4.442231075697211, "percentage": 63.46, "elapsed_time": "1:28:51", "remaining_time": "0:51:09"}
224
+ {"current_steps": 1120, "total_steps": 1757, "loss": 0.1554, "lr": 1.4031700118577223e-05, "epoch": 4.46215139442231, "percentage": 63.75, "elapsed_time": "1:29:11", "remaining_time": "0:50:43"}
225
+ {"current_steps": 1125, "total_steps": 1757, "loss": 0.1562, "lr": 1.3842342542716015e-05, "epoch": 4.482071713147411, "percentage": 64.03, "elapsed_time": "1:29:34", "remaining_time": "0:50:19"}
226
+ {"current_steps": 1130, "total_steps": 1757, "loss": 0.1567, "lr": 1.3653592805023305e-05, "epoch": 4.5019920318725095, "percentage": 64.31, "elapsed_time": "1:29:53", "remaining_time": "0:49:52"}
227
+ {"current_steps": 1135, "total_steps": 1757, "loss": 0.1338, "lr": 1.3465469537470042e-05, "epoch": 4.52191235059761, "percentage": 64.6, "elapsed_time": "1:30:13", "remaining_time": "0:49:26"}
228
+ {"current_steps": 1140, "total_steps": 1757, "loss": 0.1605, "lr": 1.3277991310186702e-05, "epoch": 4.541832669322709, "percentage": 64.88, "elapsed_time": "1:30:37", "remaining_time": "0:49:02"}
229
+ {"current_steps": 1145, "total_steps": 1757, "loss": 0.1816, "lr": 1.3091176629630176e-05, "epoch": 4.561752988047809, "percentage": 65.17, "elapsed_time": "1:31:00", "remaining_time": "0:48:38"}
230
+ {"current_steps": 1150, "total_steps": 1757, "loss": 0.1466, "lr": 1.2905043936756964e-05, "epoch": 4.581673306772909, "percentage": 65.45, "elapsed_time": "1:31:17", "remaining_time": "0:48:11"}
231
+ {"current_steps": 1155, "total_steps": 1757, "loss": 0.1391, "lr": 1.2719611605202799e-05, "epoch": 4.601593625498008, "percentage": 65.74, "elapsed_time": "1:31:38", "remaining_time": "0:47:45"}
232
+ {"current_steps": 1160, "total_steps": 1757, "loss": 0.1581, "lr": 1.2534897939468973e-05, "epoch": 4.621513944223108, "percentage": 66.02, "elapsed_time": "1:31:59", "remaining_time": "0:47:20"}
233
+ {"current_steps": 1165, "total_steps": 1757, "loss": 0.164, "lr": 1.2350921173115403e-05, "epoch": 4.6414342629482075, "percentage": 66.31, "elapsed_time": "1:32:24", "remaining_time": "0:46:57"}
234
+ {"current_steps": 1170, "total_steps": 1757, "loss": 0.1599, "lr": 1.2167699466960788e-05, "epoch": 4.661354581673307, "percentage": 66.59, "elapsed_time": "1:32:48", "remaining_time": "0:46:34"}
235
+ {"current_steps": 1175, "total_steps": 1757, "loss": 0.17, "lr": 1.1985250907289906e-05, "epoch": 4.681274900398407, "percentage": 66.88, "elapsed_time": "1:33:15", "remaining_time": "0:46:11"}
236
+ {"current_steps": 1180, "total_steps": 1757, "loss": 0.1526, "lr": 1.1803593504068256e-05, "epoch": 4.701195219123506, "percentage": 67.16, "elapsed_time": "1:33:34", "remaining_time": "0:45:45"}
237
+ {"current_steps": 1185, "total_steps": 1757, "loss": 0.1404, "lr": 1.1622745189164249e-05, "epoch": 4.721115537848606, "percentage": 67.44, "elapsed_time": "1:33:57", "remaining_time": "0:45:21"}
238
+ {"current_steps": 1190, "total_steps": 1757, "loss": 0.1599, "lr": 1.1442723814579111e-05, "epoch": 4.741035856573705, "percentage": 67.73, "elapsed_time": "1:34:14", "remaining_time": "0:44:54"}
239
+ {"current_steps": 1195, "total_steps": 1757, "loss": 0.1691, "lr": 1.1263547150684686e-05, "epoch": 4.760956175298805, "percentage": 68.01, "elapsed_time": "1:34:32", "remaining_time": "0:44:27"}
240
+ {"current_steps": 1200, "total_steps": 1757, "loss": 0.1469, "lr": 1.1085232884469236e-05, "epoch": 4.780876494023905, "percentage": 68.3, "elapsed_time": "1:34:50", "remaining_time": "0:44:01"}
241
+ {"current_steps": 1205, "total_steps": 1757, "loss": 0.17, "lr": 1.0907798617791548e-05, "epoch": 4.800796812749004, "percentage": 68.58, "elapsed_time": "1:36:14", "remaining_time": "0:44:05"}
242
+ {"current_steps": 1210, "total_steps": 1757, "loss": 0.1523, "lr": 1.0731261865643376e-05, "epoch": 4.820717131474104, "percentage": 68.87, "elapsed_time": "1:36:35", "remaining_time": "0:43:40"}
243
+ {"current_steps": 1215, "total_steps": 1757, "loss": 0.1507, "lr": 1.0555640054420558e-05, "epoch": 4.840637450199203, "percentage": 69.15, "elapsed_time": "1:36:55", "remaining_time": "0:43:14"}
244
+ {"current_steps": 1220, "total_steps": 1757, "loss": 0.1866, "lr": 1.0380950520202705e-05, "epoch": 4.860557768924303, "percentage": 69.44, "elapsed_time": "1:37:22", "remaining_time": "0:42:51"}
245
+ {"current_steps": 1225, "total_steps": 1757, "loss": 0.1539, "lr": 1.0207210507042013e-05, "epoch": 4.8804780876494025, "percentage": 69.72, "elapsed_time": "1:37:46", "remaining_time": "0:42:27"}
246
+ {"current_steps": 1230, "total_steps": 1757, "loss": 0.1785, "lr": 1.0034437165261013e-05, "epoch": 4.900398406374502, "percentage": 70.01, "elapsed_time": "1:38:11", "remaining_time": "0:42:04"}
247
+ {"current_steps": 1235, "total_steps": 1757, "loss": 0.1693, "lr": 9.862647549759614e-06, "epoch": 4.920318725099602, "percentage": 70.29, "elapsed_time": "1:38:32", "remaining_time": "0:41:39"}
248
+ {"current_steps": 1240, "total_steps": 1757, "loss": 0.1713, "lr": 9.691858618331586e-06, "epoch": 4.940239043824701, "percentage": 70.57, "elapsed_time": "1:38:56", "remaining_time": "0:41:15"}
249
+ {"current_steps": 1245, "total_steps": 1757, "loss": 0.1556, "lr": 9.5220872299906e-06, "epoch": 4.960159362549801, "percentage": 70.86, "elapsed_time": "1:39:16", "remaining_time": "0:40:49"}
250
+ {"current_steps": 1250, "total_steps": 1757, "loss": 0.1441, "lr": 9.353350143306037e-06, "epoch": 4.9800796812749, "percentage": 71.14, "elapsed_time": "1:39:35", "remaining_time": "0:40:23"}
251
+ {"current_steps": 1255, "total_steps": 1757, "loss": 0.1324, "lr": 9.185664014748728e-06, "epoch": 5.0, "percentage": 71.43, "elapsed_time": "1:39:55", "remaining_time": "0:39:58"}
252
+ {"current_steps": 1260, "total_steps": 1757, "loss": 0.1418, "lr": 9.019045397046703e-06, "epoch": 5.0199203187251, "percentage": 71.71, "elapsed_time": "1:40:19", "remaining_time": "0:39:34"}
253
+ {"current_steps": 1265, "total_steps": 1757, "loss": 0.1276, "lr": 8.853510737551274e-06, "epoch": 5.039840637450199, "percentage": 72.0, "elapsed_time": "1:40:38", "remaining_time": "0:39:08"}
254
+ {"current_steps": 1270, "total_steps": 1757, "loss": 0.1406, "lr": 8.689076376613446e-06, "epoch": 5.059760956175299, "percentage": 72.28, "elapsed_time": "1:40:59", "remaining_time": "0:38:43"}
255
+ {"current_steps": 1275, "total_steps": 1757, "loss": 0.1257, "lr": 8.525758545970937e-06, "epoch": 5.079681274900398, "percentage": 72.57, "elapsed_time": "1:41:22", "remaining_time": "0:38:19"}
256
+ {"current_steps": 1280, "total_steps": 1757, "loss": 0.1453, "lr": 8.363573367145892e-06, "epoch": 5.099601593625498, "percentage": 72.85, "elapsed_time": "1:41:43", "remaining_time": "0:37:54"}
257
+ {"current_steps": 1285, "total_steps": 1757, "loss": 0.1485, "lr": 8.202536849853482e-06, "epoch": 5.1195219123505975, "percentage": 73.14, "elapsed_time": "1:42:06", "remaining_time": "0:37:30"}
258
+ {"current_steps": 1290, "total_steps": 1757, "loss": 0.1199, "lr": 8.04266489042157e-06, "epoch": 5.139442231075697, "percentage": 73.42, "elapsed_time": "1:42:34", "remaining_time": "0:37:08"}
259
+ {"current_steps": 1295, "total_steps": 1757, "loss": 0.1441, "lr": 7.883973270221508e-06, "epoch": 5.159362549800797, "percentage": 73.71, "elapsed_time": "1:42:56", "remaining_time": "0:36:43"}
260
+ {"current_steps": 1300, "total_steps": 1757, "loss": 0.1527, "lr": 7.726477654110333e-06, "epoch": 5.179282868525896, "percentage": 73.99, "elapsed_time": "1:43:17", "remaining_time": "0:36:18"}
261
+ {"current_steps": 1305, "total_steps": 1757, "loss": 0.1419, "lr": 7.570193588884458e-06, "epoch": 5.199203187250996, "percentage": 74.27, "elapsed_time": "1:43:36", "remaining_time": "0:35:53"}
262
+ {"current_steps": 1310, "total_steps": 1757, "loss": 0.1245, "lr": 7.415136501744986e-06, "epoch": 5.219123505976095, "percentage": 74.56, "elapsed_time": "1:43:57", "remaining_time": "0:35:28"}
263
+ {"current_steps": 1315, "total_steps": 1757, "loss": 0.1365, "lr": 7.261321698774888e-06, "epoch": 5.239043824701195, "percentage": 74.84, "elapsed_time": "1:44:17", "remaining_time": "0:35:03"}
264
+ {"current_steps": 1320, "total_steps": 1757, "loss": 0.1368, "lr": 7.1087643634280625e-06, "epoch": 5.258964143426295, "percentage": 75.13, "elapsed_time": "1:44:41", "remaining_time": "0:34:39"}
265
+ {"current_steps": 1325, "total_steps": 1757, "loss": 0.1396, "lr": 6.957479555030528e-06, "epoch": 5.278884462151394, "percentage": 75.41, "elapsed_time": "1:45:02", "remaining_time": "0:34:14"}
266
+ {"current_steps": 1330, "total_steps": 1757, "loss": 0.1498, "lr": 6.807482207293954e-06, "epoch": 5.298804780876494, "percentage": 75.7, "elapsed_time": "1:45:22", "remaining_time": "0:33:49"}
267
+ {"current_steps": 1335, "total_steps": 1757, "loss": 0.1297, "lr": 6.658787126841426e-06, "epoch": 5.318725099601593, "percentage": 75.98, "elapsed_time": "1:45:44", "remaining_time": "0:33:25"}
268
+ {"current_steps": 1340, "total_steps": 1757, "loss": 0.1423, "lr": 6.511408991745893e-06, "epoch": 5.338645418326693, "percentage": 76.27, "elapsed_time": "1:46:07", "remaining_time": "0:33:01"}
269
+ {"current_steps": 1345, "total_steps": 1757, "loss": 0.1361, "lr": 6.36536235008125e-06, "epoch": 5.3585657370517925, "percentage": 76.55, "elapsed_time": "1:46:26", "remaining_time": "0:32:36"}
270
+ {"current_steps": 1350, "total_steps": 1757, "loss": 0.1361, "lr": 6.220661618486268e-06, "epoch": 5.378486055776892, "percentage": 76.84, "elapsed_time": "1:46:50", "remaining_time": "0:32:12"}
271
+ {"current_steps": 1355, "total_steps": 1757, "loss": 0.1389, "lr": 6.077321080741469e-06, "epoch": 5.398406374501992, "percentage": 77.12, "elapsed_time": "1:47:08", "remaining_time": "0:31:47"}
272
+ {"current_steps": 1360, "total_steps": 1757, "loss": 0.1461, "lr": 5.9353548863591504e-06, "epoch": 5.418326693227091, "percentage": 77.4, "elapsed_time": "1:47:28", "remaining_time": "0:31:22"}
273
+ {"current_steps": 1365, "total_steps": 1757, "loss": 0.1516, "lr": 5.794777049186657e-06, "epoch": 5.438247011952191, "percentage": 77.69, "elapsed_time": "1:47:51", "remaining_time": "0:30:58"}
274
+ {"current_steps": 1370, "total_steps": 1757, "loss": 0.1338, "lr": 5.655601446023016e-06, "epoch": 5.45816733067729, "percentage": 77.97, "elapsed_time": "1:48:12", "remaining_time": "0:30:34"}
275
+ {"current_steps": 1375, "total_steps": 1757, "loss": 0.1404, "lr": 5.51784181524917e-06, "epoch": 5.47808764940239, "percentage": 78.26, "elapsed_time": "1:48:33", "remaining_time": "0:30:09"}
276
+ {"current_steps": 1380, "total_steps": 1757, "loss": 0.1269, "lr": 5.381511755471751e-06, "epoch": 5.4980079681274905, "percentage": 78.54, "elapsed_time": "1:48:54", "remaining_time": "0:29:45"}
277
+ {"current_steps": 1385, "total_steps": 1757, "loss": 0.1445, "lr": 5.246624724180789e-06, "epoch": 5.517928286852589, "percentage": 78.83, "elapsed_time": "1:49:19", "remaining_time": "0:29:21"}
278
+ {"current_steps": 1390, "total_steps": 1757, "loss": 0.1351, "lr": 5.113194036421285e-06, "epoch": 5.53784860557769, "percentage": 79.11, "elapsed_time": "1:49:39", "remaining_time": "0:28:57"}
279
+ {"current_steps": 1395, "total_steps": 1757, "loss": 0.1413, "lr": 4.9812328634788135e-06, "epoch": 5.557768924302788, "percentage": 79.4, "elapsed_time": "1:50:00", "remaining_time": "0:28:32"}
280
+ {"current_steps": 1400, "total_steps": 1757, "loss": 0.1636, "lr": 4.850754231579371e-06, "epoch": 5.577689243027889, "percentage": 79.68, "elapsed_time": "1:50:23", "remaining_time": "0:28:09"}
281
+ {"current_steps": 1405, "total_steps": 1757, "loss": 0.1232, "lr": 4.721771020603523e-06, "epoch": 5.597609561752988, "percentage": 79.97, "elapsed_time": "1:51:38", "remaining_time": "0:27:58"}
282
+ {"current_steps": 1410, "total_steps": 1757, "loss": 0.1511, "lr": 4.594295962815018e-06, "epoch": 5.617529880478088, "percentage": 80.25, "elapsed_time": "1:51:56", "remaining_time": "0:27:32"}
283
+ {"current_steps": 1415, "total_steps": 1757, "loss": 0.1331, "lr": 4.468341641603908e-06, "epoch": 5.637450199203188, "percentage": 80.54, "elapsed_time": "1:52:17", "remaining_time": "0:27:08"}
284
+ {"current_steps": 1420, "total_steps": 1757, "loss": 0.1377, "lr": 4.343920490244449e-06, "epoch": 5.657370517928287, "percentage": 80.82, "elapsed_time": "1:52:43", "remaining_time": "0:26:45"}
285
+ {"current_steps": 1425, "total_steps": 1757, "loss": 0.1494, "lr": 4.221044790667761e-06, "epoch": 5.677290836653387, "percentage": 81.1, "elapsed_time": "1:53:05", "remaining_time": "0:26:20"}
286
+ {"current_steps": 1430, "total_steps": 1757, "loss": 0.1433, "lr": 4.0997266722494555e-06, "epoch": 5.697211155378486, "percentage": 81.39, "elapsed_time": "1:53:25", "remaining_time": "0:25:56"}
287
+ {"current_steps": 1435, "total_steps": 1757, "loss": 0.1592, "lr": 3.979978110612313e-06, "epoch": 5.717131474103586, "percentage": 81.67, "elapsed_time": "1:53:50", "remaining_time": "0:25:32"}
288
+ {"current_steps": 1440, "total_steps": 1757, "loss": 0.1408, "lr": 3.861810926444129e-06, "epoch": 5.7370517928286855, "percentage": 81.96, "elapsed_time": "1:54:10", "remaining_time": "0:25:08"}
289
+ {"current_steps": 1445, "total_steps": 1757, "loss": 0.1464, "lr": 3.7452367843308745e-06, "epoch": 5.756972111553785, "percentage": 82.24, "elapsed_time": "1:54:34", "remaining_time": "0:24:44"}
290
+ {"current_steps": 1450, "total_steps": 1757, "loss": 0.1349, "lr": 3.6302671916052634e-06, "epoch": 5.776892430278885, "percentage": 82.53, "elapsed_time": "1:54:59", "remaining_time": "0:24:20"}
291
+ {"current_steps": 1455, "total_steps": 1757, "loss": 0.1438, "lr": 3.5169134972108078e-06, "epoch": 5.796812749003984, "percentage": 82.81, "elapsed_time": "1:55:21", "remaining_time": "0:23:56"}
292
+ {"current_steps": 1460, "total_steps": 1757, "loss": 0.1452, "lr": 3.405186890581551e-06, "epoch": 5.816733067729084, "percentage": 83.1, "elapsed_time": "1:55:42", "remaining_time": "0:23:32"}
293
+ {"current_steps": 1465, "total_steps": 1757, "loss": 0.1557, "lr": 3.2950984005375264e-06, "epoch": 5.836653386454183, "percentage": 83.38, "elapsed_time": "1:56:01", "remaining_time": "0:23:07"}
294
+ {"current_steps": 1470, "total_steps": 1757, "loss": 0.1622, "lr": 3.1866588941960886e-06, "epoch": 5.856573705179283, "percentage": 83.67, "elapsed_time": "1:56:21", "remaining_time": "0:22:43"}
295
+ {"current_steps": 1475, "total_steps": 1757, "loss": 0.1379, "lr": 3.079879075899166e-06, "epoch": 5.876494023904383, "percentage": 83.95, "elapsed_time": "1:56:43", "remaining_time": "0:22:19"}
296
+ {"current_steps": 1480, "total_steps": 1757, "loss": 0.1422, "lr": 2.9747694861566235e-06, "epoch": 5.896414342629482, "percentage": 84.23, "elapsed_time": "1:57:04", "remaining_time": "0:21:54"}
+ {"current_steps": 1485, "total_steps": 1757, "loss": 0.1572, "lr": 2.871340500605784e-06, "epoch": 5.916334661354582, "percentage": 84.52, "elapsed_time": "1:57:22", "remaining_time": "0:21:30"}
+ {"current_steps": 1490, "total_steps": 1757, "loss": 0.1367, "lr": 2.7696023289872153e-06, "epoch": 5.936254980079681, "percentage": 84.8, "elapsed_time": "1:57:44", "remaining_time": "0:21:05"}
+ {"current_steps": 1495, "total_steps": 1757, "loss": 0.145, "lr": 2.6695650141369036e-06, "epoch": 5.956175298804781, "percentage": 85.09, "elapsed_time": "1:58:05", "remaining_time": "0:20:41"}
+ {"current_steps": 1500, "total_steps": 1757, "loss": 0.1521, "lr": 2.5712384309948977e-06, "epoch": 5.9760956175298805, "percentage": 85.37, "elapsed_time": "1:58:25", "remaining_time": "0:20:17"}
+ {"current_steps": 1505, "total_steps": 1757, "loss": 0.1465, "lr": 2.4746322856305293e-06, "epoch": 5.99601593625498, "percentage": 85.66, "elapsed_time": "1:58:44", "remaining_time": "0:19:52"}
+ {"current_steps": 1510, "total_steps": 1757, "loss": 0.1267, "lr": 2.3797561142843107e-06, "epoch": 6.01593625498008, "percentage": 85.94, "elapsed_time": "1:59:03", "remaining_time": "0:19:28"}
+ {"current_steps": 1515, "total_steps": 1757, "loss": 0.1457, "lr": 2.2866192824265676e-06, "epoch": 6.035856573705179, "percentage": 86.23, "elapsed_time": "1:59:28", "remaining_time": "0:19:05"}
+ {"current_steps": 1520, "total_steps": 1757, "loss": 0.1122, "lr": 2.19523098383297e-06, "epoch": 6.055776892430279, "percentage": 86.51, "elapsed_time": "1:59:53", "remaining_time": "0:18:41"}
+ {"current_steps": 1525, "total_steps": 1757, "loss": 0.1481, "lr": 2.1056002396769724e-06, "epoch": 6.075697211155378, "percentage": 86.8, "elapsed_time": "2:00:16", "remaining_time": "0:18:17"}
+ {"current_steps": 1530, "total_steps": 1757, "loss": 0.1292, "lr": 2.017735897639339e-06, "epoch": 6.095617529880478, "percentage": 87.08, "elapsed_time": "2:00:35", "remaining_time": "0:17:53"}
+ {"current_steps": 1535, "total_steps": 1757, "loss": 0.1278, "lr": 1.9316466310347314e-06, "epoch": 6.115537848605578, "percentage": 87.36, "elapsed_time": "2:00:54", "remaining_time": "0:17:29"}
+ {"current_steps": 1540, "total_steps": 1757, "loss": 0.1434, "lr": 1.8473409379555684e-06, "epoch": 6.135458167330677, "percentage": 87.65, "elapsed_time": "2:01:13", "remaining_time": "0:17:04"}
+ {"current_steps": 1545, "total_steps": 1757, "loss": 0.1175, "lr": 1.7648271404331386e-06, "epoch": 6.155378486055777, "percentage": 87.93, "elapsed_time": "2:01:31", "remaining_time": "0:16:40"}
+ {"current_steps": 1550, "total_steps": 1757, "loss": 0.1303, "lr": 1.6841133836161371e-06, "epoch": 6.175298804780876, "percentage": 88.22, "elapsed_time": "2:01:52", "remaining_time": "0:16:16"}
+ {"current_steps": 1555, "total_steps": 1757, "loss": 0.1169, "lr": 1.605207634966617e-06, "epoch": 6.195219123505976, "percentage": 88.5, "elapsed_time": "2:02:12", "remaining_time": "0:15:52"}
+ {"current_steps": 1560, "total_steps": 1757, "loss": 0.123, "lr": 1.5281176834735e-06, "epoch": 6.2151394422310755, "percentage": 88.79, "elapsed_time": "2:02:40", "remaining_time": "0:15:29"}
+ {"current_steps": 1565, "total_steps": 1757, "loss": 0.1449, "lr": 1.4528511388837153e-06, "epoch": 6.235059760956175, "percentage": 89.07, "elapsed_time": "2:03:01", "remaining_time": "0:15:05"}
+ {"current_steps": 1570, "total_steps": 1757, "loss": 0.1519, "lr": 1.3794154309510189e-06, "epoch": 6.254980079681275, "percentage": 89.36, "elapsed_time": "2:03:21", "remaining_time": "0:14:41"}
+ {"current_steps": 1575, "total_steps": 1757, "loss": 0.1415, "lr": 1.3078178087025827e-06, "epoch": 6.274900398406374, "percentage": 89.64, "elapsed_time": "2:03:44", "remaining_time": "0:14:17"}
+ {"current_steps": 1580, "total_steps": 1757, "loss": 0.1388, "lr": 1.23806533972342e-06, "epoch": 6.294820717131474, "percentage": 89.93, "elapsed_time": "2:04:07", "remaining_time": "0:13:54"}
+ {"current_steps": 1585, "total_steps": 1757, "loss": 0.1339, "lr": 1.170164909458731e-06, "epoch": 6.314741035856573, "percentage": 90.21, "elapsed_time": "2:04:26", "remaining_time": "0:13:30"}
+ {"current_steps": 1590, "total_steps": 1757, "loss": 0.1483, "lr": 1.1041232205342257e-06, "epoch": 6.334661354581673, "percentage": 90.5, "elapsed_time": "2:04:46", "remaining_time": "0:13:06"}
+ {"current_steps": 1595, "total_steps": 1757, "loss": 0.1419, "lr": 1.0399467920944862e-06, "epoch": 6.354581673306773, "percentage": 90.78, "elapsed_time": "2:05:10", "remaining_time": "0:12:42"}
+ {"current_steps": 1600, "total_steps": 1757, "loss": 0.1335, "lr": 9.77641959159441e-07, "epoch": 6.374501992031872, "percentage": 91.06, "elapsed_time": "2:05:30", "remaining_time": "0:12:18"}
+ {"current_steps": 1605, "total_steps": 1757, "loss": 0.1353, "lr": 9.172148719990237e-07, "epoch": 6.394422310756972, "percentage": 91.35, "elapsed_time": "2:06:42", "remaining_time": "0:12:00"}
+ {"current_steps": 1610, "total_steps": 1757, "loss": 0.138, "lr": 8.586714955260733e-07, "epoch": 6.414342629482071, "percentage": 91.63, "elapsed_time": "2:07:04", "remaining_time": "0:11:36"}
+ {"current_steps": 1615, "total_steps": 1757, "loss": 0.1352, "lr": 8.020176087075038e-07, "epoch": 6.434262948207171, "percentage": 91.92, "elapsed_time": "2:07:26", "remaining_time": "0:11:12"}
+ {"current_steps": 1620, "total_steps": 1757, "loss": 0.1328, "lr": 7.472588039938622e-07, "epoch": 6.4541832669322705, "percentage": 92.2, "elapsed_time": "2:07:48", "remaining_time": "0:10:48"}
+ {"current_steps": 1625, "total_steps": 1757, "loss": 0.1361, "lr": 6.944004867672727e-07, "epoch": 6.474103585657371, "percentage": 92.49, "elapsed_time": "2:08:09", "remaining_time": "0:10:24"}
+ {"current_steps": 1630, "total_steps": 1757, "loss": 0.1316, "lr": 6.434478748078676e-07, "epoch": 6.49402390438247, "percentage": 92.77, "elapsed_time": "2:08:26", "remaining_time": "0:10:00"}
+ {"current_steps": 1635, "total_steps": 1757, "loss": 0.1244, "lr": 5.944059977787242e-07, "epoch": 6.51394422310757, "percentage": 93.06, "elapsed_time": "2:08:49", "remaining_time": "0:09:36"}
+ {"current_steps": 1640, "total_steps": 1757, "loss": 0.1498, "lr": 5.472796967293614e-07, "epoch": 6.533864541832669, "percentage": 93.34, "elapsed_time": "2:09:12", "remaining_time": "0:09:13"}
+ {"current_steps": 1645, "total_steps": 1757, "loss": 0.1426, "lr": 5.020736236178913e-07, "epoch": 6.553784860557769, "percentage": 93.63, "elapsed_time": "2:09:31", "remaining_time": "0:08:49"}
+ {"current_steps": 1650, "total_steps": 1757, "loss": 0.1291, "lr": 4.587922408517864e-07, "epoch": 6.573705179282869, "percentage": 93.91, "elapsed_time": "2:09:52", "remaining_time": "0:08:25"}
+ {"current_steps": 1655, "total_steps": 1757, "loss": 0.1388, "lr": 4.174398208474051e-07, "epoch": 6.5936254980079685, "percentage": 94.19, "elapsed_time": "2:10:11", "remaining_time": "0:08:01"}
+ {"current_steps": 1660, "total_steps": 1757, "loss": 0.1453, "lr": 3.780204456082381e-07, "epoch": 6.613545816733068, "percentage": 94.48, "elapsed_time": "2:10:35", "remaining_time": "0:07:37"}
+ {"current_steps": 1665, "total_steps": 1757, "loss": 0.1354, "lr": 3.4053800632196434e-07, "epoch": 6.633466135458168, "percentage": 94.76, "elapsed_time": "2:10:56", "remaining_time": "0:07:14"}
+ {"current_steps": 1670, "total_steps": 1757, "loss": 0.1238, "lr": 3.0499620297635315e-07, "epoch": 6.653386454183267, "percentage": 95.05, "elapsed_time": "2:11:15", "remaining_time": "0:06:50"}
+ {"current_steps": 1675, "total_steps": 1757, "loss": 0.122, "lr": 2.7139854399401256e-07, "epoch": 6.673306772908367, "percentage": 95.33, "elapsed_time": "2:11:36", "remaining_time": "0:06:26"}
+ {"current_steps": 1680, "total_steps": 1757, "loss": 0.135, "lr": 2.3974834588607496e-07, "epoch": 6.693227091633466, "percentage": 95.62, "elapsed_time": "2:11:54", "remaining_time": "0:06:02"}
+ {"current_steps": 1685, "total_steps": 1757, "loss": 0.132, "lr": 2.1004873292480797e-07, "epoch": 6.713147410358566, "percentage": 95.9, "elapsed_time": "2:12:17", "remaining_time": "0:05:39"}
+ {"current_steps": 1690, "total_steps": 1757, "loss": 0.1483, "lr": 1.823026368352232e-07, "epoch": 6.733067729083666, "percentage": 96.19, "elapsed_time": "2:12:45", "remaining_time": "0:05:15"}
+ {"current_steps": 1695, "total_steps": 1757, "loss": 0.1189, "lr": 1.5651279650565897e-07, "epoch": 6.752988047808765, "percentage": 96.47, "elapsed_time": "2:13:07", "remaining_time": "0:04:52"}
+ {"current_steps": 1700, "total_steps": 1757, "loss": 0.128, "lr": 1.3268175771743663e-07, "epoch": 6.772908366533865, "percentage": 96.76, "elapsed_time": "2:13:30", "remaining_time": "0:04:28"}
+ {"current_steps": 1705, "total_steps": 1757, "loss": 0.1357, "lr": 1.108118728935459e-07, "epoch": 6.792828685258964, "percentage": 97.04, "elapsed_time": "2:13:51", "remaining_time": "0:04:04"}
+ {"current_steps": 1710, "total_steps": 1757, "loss": 0.1314, "lr": 9.09053008664329e-08, "epoch": 6.812749003984064, "percentage": 97.32, "elapsed_time": "2:14:10", "remaining_time": "0:03:41"}
+ {"current_steps": 1715, "total_steps": 1757, "loss": 0.1486, "lr": 7.296400666490844e-08, "epoch": 6.8326693227091635, "percentage": 97.61, "elapsed_time": "2:14:29", "remaining_time": "0:03:17"}
+ {"current_steps": 1720, "total_steps": 1757, "loss": 0.1264, "lr": 5.6989761320160874e-08, "epoch": 6.852589641434263, "percentage": 97.89, "elapsed_time": "2:14:50", "remaining_time": "0:02:54"}
+ {"current_steps": 1725, "total_steps": 1757, "loss": 0.1306, "lr": 4.298414169093601e-08, "epoch": 6.872509960159363, "percentage": 98.18, "elapsed_time": "2:15:09", "remaining_time": "0:02:30"}
+ {"current_steps": 1730, "total_steps": 1757, "loss": 0.1379, "lr": 3.094853030788825e-08, "epoch": 6.892430278884462, "percentage": 98.46, "elapsed_time": "2:15:30", "remaining_time": "0:02:06"}
+ {"current_steps": 1735, "total_steps": 1757, "loss": 0.1324, "lr": 2.0884115237103098e-08, "epoch": 6.912350597609562, "percentage": 98.75, "elapsed_time": "2:15:49", "remaining_time": "0:01:43"}
+ {"current_steps": 1740, "total_steps": 1757, "loss": 0.1214, "lr": 1.279188996281766e-08, "epoch": 6.932270916334661, "percentage": 99.03, "elapsed_time": "2:16:06", "remaining_time": "0:01:19"}
+ {"current_steps": 1745, "total_steps": 1757, "loss": 0.1353, "lr": 6.6726532893501835e-09, "epoch": 6.952191235059761, "percentage": 99.32, "elapsed_time": "2:16:24", "remaining_time": "0:00:56"}
+ {"current_steps": 1750, "total_steps": 1757, "loss": 0.1391, "lr": 2.527009262258684e-09, "epoch": 6.972111553784861, "percentage": 99.6, "elapsed_time": "2:16:48", "remaining_time": "0:00:32"}
+ {"current_steps": 1755, "total_steps": 1757, "loss": 0.1213, "lr": 3.553671087019872e-10, "epoch": 6.99203187250996, "percentage": 99.89, "elapsed_time": "2:17:12", "remaining_time": "0:00:09"}
+ {"current_steps": 1757, "total_steps": 1757, "epoch": 7.0, "percentage": 100.0, "elapsed_time": "2:18:15", "remaining_time": "0:00:00"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cb853749962375f0a7a6dcf3cf156b04b2b5cab50e35faf7ac9f48d73738f074
+ size 8593
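The three lines above are a Git LFS pointer, not the binary itself: the actual `training_args.bin` (8,593 bytes, identified by its SHA-256) lives in LFS storage. The Hugging Face Trainer serializes this file with `torch.save`, so once it has been fetched (e.g. via `git lfs pull`) it can be unpickled for inspection. A minimal sketch, assuming the file has been materialized locally:

```python
import torch

# Unpickle the serialized TrainingArguments. On PyTorch >= 2.6 the default
# is weights_only=True, which rejects pickled objects, hence the override;
# only do this for files from a source you trust.
args = torch.load("training_args.bin", weights_only=False)

print(type(args).__name__)  # typically TrainingArguments
print(args.learning_rate, args.lr_scheduler_type, args.num_train_epochs)
```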
vocab.json ADDED
The diff for this file is too large to render. See raw diff
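`vocab.json` is the tokenizer's token-to-id table, which is routinely too large for the web diff viewer to render inline. A short sketch to confirm its contents from a local checkout, assuming it follows the standard BPE vocabulary layout used by Qwen-family tokenizers:

```python
import json

# Load the token -> id mapping. Qwen-family vocabularies run to roughly
# 150k entries (the exact count here is unverified), which is why the
# web diff falls back to "See raw diff".
with open("vocab.json", encoding="utf-8") as f:
    vocab = json.load(f)

print(f"{len(vocab)} tokens")
print(sorted(vocab, key=vocab.get)[:5])  # the first few tokens by id
```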