Training in progress, step 1500
- .gitattributes +1 -0
- added_tokens.json +28 -0
- chat_template.jinja +89 -0
- config.json +68 -0
- generation_config.json +12 -0
- merges.txt +0 -0
- model-00001-of-00004.safetensors +3 -0
- model-00002-of-00004.safetensors +3 -0
- model-00003-of-00004.safetensors +3 -0
- model-00004-of-00004.safetensors +3 -0
- model.safetensors.index.json +407 -0
- run_summary.json +12 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +240 -0
- trainer_log.jsonl +306 -0
- training_args.bin +3 -0
- vocab.json +0 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,28 @@
+{
+  "</think>": 151668,
+  "</tool_call>": 151658,
+  "</tool_response>": 151666,
+  "<think>": 151667,
+  "<tool_call>": 151657,
+  "<tool_response>": 151665,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
chat_template.jinja ADDED
@@ -0,0 +1,89 @@
+{%- if tools %}
+{{- '<|im_start|>system\n' }}
+{%- if messages[0].role == 'system' %}
+{{- messages[0].content + '\n\n' }}
+{%- endif %}
+{{- "# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+{%- for tool in tools %}
+{{- "\n" }}
+{{- tool | tojson }}
+{%- endfor %}
+{{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+{%- else %}
+{%- if messages[0].role == 'system' %}
+{{- '<|im_start|>system\n' + messages[0].content + '<|im_end|>\n' }}
+{%- endif %}
+{%- endif %}
+{%- set ns = namespace(multi_step_tool=true, last_query_index=messages|length - 1) %}
+{%- for message in messages[::-1] %}
+{%- set index = (messages|length - 1) - loop.index0 %}
+{%- if ns.multi_step_tool and message.role == "user" and message.content is string and not(message.content.startswith('<tool_response>') and message.content.endswith('</tool_response>')) %}
+{%- set ns.multi_step_tool = false %}
+{%- set ns.last_query_index = index %}
+{%- endif %}
+{%- endfor %}
+{%- for message in messages %}
+{%- if message.content is string %}
+{%- set content = message.content %}
+{%- else %}
+{%- set content = '' %}
+{%- endif %}
+{%- if (message.role == "user") or (message.role == "system" and not loop.first) %}
+{{- '<|im_start|>' + message.role + '\n' + content + '<|im_end|>' + '\n' }}
+{%- elif message.role == "assistant" %}
+{%- set reasoning_content = '' %}
+{%- if message.reasoning_content is string %}
+{%- set reasoning_content = message.reasoning_content %}
+{%- else %}
+{%- if '</think>' in content %}
+{%- set reasoning_content = content.split('</think>')[0].rstrip('\n').split('<think>')[-1].lstrip('\n') %}
+{%- set content = content.split('</think>')[-1].lstrip('\n') %}
+{%- endif %}
+{%- endif %}
+{%- if loop.index0 > ns.last_query_index %}
+{%- if loop.last or (not loop.last and reasoning_content) %}
+{{- '<|im_start|>' + message.role + '\n<think>\n' + reasoning_content.strip('\n') + '\n</think>\n\n' + content.lstrip('\n') }}
+{%- else %}
+{{- '<|im_start|>' + message.role + '\n' + content }}
+{%- endif %}
+{%- else %}
+{{- '<|im_start|>' + message.role + '\n' + content }}
+{%- endif %}
+{%- if message.tool_calls %}
+{%- for tool_call in message.tool_calls %}
+{%- if (loop.first and content) or (not loop.first) %}
+{{- '\n' }}
+{%- endif %}
+{%- if tool_call.function %}
+{%- set tool_call = tool_call.function %}
+{%- endif %}
+{{- '<tool_call>\n{"name": "' }}
+{{- tool_call.name }}
+{{- '", "arguments": ' }}
+{%- if tool_call.arguments is string %}
+{{- tool_call.arguments }}
+{%- else %}
+{{- tool_call.arguments | tojson }}
+{%- endif %}
+{{- '}\n</tool_call>' }}
+{%- endfor %}
+{%- endif %}
+{{- '<|im_end|>\n' }}
+{%- elif message.role == "tool" %}
+{%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
+{{- '<|im_start|>user' }}
+{%- endif %}
+{{- '\n<tool_response>\n' }}
+{{- content }}
+{{- '\n</tool_response>' }}
+{%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+{{- '<|im_end|>\n' }}
+{%- endif %}
+{%- endif %}
+{%- endfor %}
+{%- if add_generation_prompt %}
+{{- '<|im_start|>assistant\n' }}
+{%- if enable_thinking is defined and enable_thinking is false %}
+{{- '<think>\n\n</think>\n\n' }}
+{%- endif %}
+{%- endif %}
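
The template above is the Qwen3-style ChatML format: an optional tools block in the system turn, <think> reasoning sections for assistant turns after the last user query, and <tool_call>/<tool_response> tags for function calling. A minimal sketch of rendering it through transformers; the local path "./checkpoint-1500" and the example messages are placeholders, not part of this commit:

from transformers import AutoTokenizer

# Placeholder path: wherever this checkpoint repository has been downloaded.
tokenizer = AutoTokenizer.from_pretrained("./checkpoint-1500")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is 2 + 2?"},
]

# add_generation_prompt appends '<|im_start|>assistant\n'; enable_thinking=False
# additionally emits an empty '<think>\n\n</think>' block, matching the template's last branch.
prompt = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,
    enable_thinking=False,
)
print(prompt)
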
config.json ADDED
@@ -0,0 +1,68 @@
+{
+  "architectures": [
+    "Qwen3ForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "dtype": "bfloat16",
+  "eos_token_id": 151645,
+  "head_dim": 128,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 12288,
+  "layer_types": [
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention"
+  ],
+  "max_position_embeddings": 40960,
+  "max_window_layers": 36,
+  "model_type": "qwen3",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 36,
+  "num_key_value_heads": 8,
+  "pad_token_id": 151643,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "transformers_version": "4.57.3",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "vocab_size": 151936
+}
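
The configuration describes a 36-layer Qwen3 dense model in bfloat16 with grouped-query attention (32 query heads sharing 8 key/value heads, head_dim 128, hidden size 4096). A minimal sketch of inspecting it with transformers, assuming the repository is downloaded to the placeholder path "./checkpoint-1500":

from transformers import AutoConfig

config = AutoConfig.from_pretrained("./checkpoint-1500")  # placeholder local path

# 36 transformer blocks; 32 query heads * head_dim 128 == hidden_size 4096.
assert config.num_hidden_layers == 36
assert config.num_attention_heads * config.head_dim == config.hidden_size
print(config.model_type, config.max_position_embeddings, config.vocab_size)
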
generation_config.json ADDED
@@ -0,0 +1,12 @@
+{
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "pad_token_id": 151643,
+  "temperature": 0.6,
+  "top_k": 20,
+  "top_p": 0.95,
+  "transformers_version": "4.57.3"
+}
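
These sampling defaults (temperature 0.6, top_p 0.95, top_k 20, sampling enabled) are picked up automatically by model.generate. A sketch of loading the checkpoint and generating with them explicitly; the path and the prompt are placeholders, and a GPU with enough memory for a bf16 8B model is assumed:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

path = "./checkpoint-1500"  # placeholder for the downloaded repository
tokenizer = AutoTokenizer.from_pretrained(path)
model = AutoModelForCausalLM.from_pretrained(path, torch_dtype=torch.bfloat16, device_map="auto")

# Defaults come from generation_config.json above.
gen_config = GenerationConfig.from_pretrained(path)

inputs = tokenizer("Briefly explain what a tool call is.", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, generation_config=gen_config, max_new_tokens=256)
print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))
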
merges.txt ADDED
The diff for this file is too large to render. See raw diff.
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dadff5100613856585a1913d2b91ad7852abffb3c19b0f6411d69a41f987e6bb
+size 4902257696
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ccd9cf8f11161a4becc57d3392bea467c0da0b3026ed9c63ae68b1a34e99d927
+size 4915960368
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b42890f8ae45e948f8816ff34a8c117d9b12f71ae3ada495b2a29a002d024659
+size 4983068496
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:15d4d625ae71e0411c1187071289ffefce6829aac0349eb0e3529d584cd6fe95
+size 1580230264
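
The four entries above are Git LFS pointer files, not the weights themselves: each records only the SHA-256 and byte size of the shard stored in LFS. A minimal sketch, using only the Python standard library, of verifying a locally downloaded shard against its pointer (file paths are placeholders):

import hashlib
import os

def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded file against the oid/size recorded in its LFS pointer."""
    fields = {}
    with open(pointer_path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    expected_oid = fields["oid"].split(":", 1)[1]   # "sha256:<hex>" -> "<hex>"
    expected_size = int(fields["size"])

    if os.path.getsize(blob_path) != expected_size:
        return False
    h = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid
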
model.safetensors.index.json ADDED
@@ -0,0 +1,407 @@
+{
+  "metadata": {
+    "total_parameters": 308224,
+    "total_size": 16381470720
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00004-of-00004.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.33.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.34.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.35.input_layernorm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.self_attn.k_norm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.self_attn.q_norm.weight": "model-00004-of-00004.safetensors",
+    "model.layers.35.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.35.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.norm.weight": "model-00004-of-00004.safetensors"
+  }
+}
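
The index maps every tensor name to the shard that stores it; transformers reads it to load only the shards a given tensor needs. A minimal standard-library sketch of grouping tensors by shard, assuming the repository has been downloaded to the placeholder path "./checkpoint-1500":

import json
from collections import defaultdict

with open("./checkpoint-1500/model.safetensors.index.json") as f:
    index = json.load(f)

tensors_per_shard = defaultdict(list)
for tensor_name, shard_file in index["weight_map"].items():
    tensors_per_shard[shard_file].append(tensor_name)

for shard_file, names in sorted(tensors_per_shard.items()):
    print(f"{shard_file}: {len(names)} tensors")
print("declared total size:", index["metadata"]["total_size"], "bytes")
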
run_summary.json ADDED
@@ -0,0 +1,12 @@
+{
+  "agent_name": "glm46-Toolscale-tasks-traces",
+  "training_start": null,
+  "training_end": null,
+  "created_by": "DCAgent",
+  "base_model_name": "Qwen/Qwen3-8B",
+  "dataset_name": "DCAgent/glm46-Toolscale-tasks-traces",
+  "training_type": "SFT",
+  "training_parameters": "https://huggingface.co/laion/glm46-Toolscale-tasks-traces/blob/main/config.json",
+  "wandb_link": "https://wandb.ai/dogml/OpenThoughts-Agent/runs/sft_glm46-Toolscale-tasks-traces_Qwen3-8B",
+  "traces_location_s3": null
+}
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
+size 11422654
tokenizer_config.json
ADDED
|
@@ -0,0 +1,240 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
  "add_bos_token": false,
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151645": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151646": {
      "content": "<|object_ref_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151647": {
      "content": "<|object_ref_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151648": {
      "content": "<|box_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151649": {
      "content": "<|box_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151650": {
      "content": "<|quad_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151651": {
      "content": "<|quad_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151652": {
      "content": "<|vision_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151653": {
      "content": "<|vision_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151654": {
      "content": "<|vision_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151655": {
      "content": "<|image_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151656": {
      "content": "<|video_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151657": {
      "content": "<tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151658": {
      "content": "</tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151659": {
      "content": "<|fim_prefix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151660": {
      "content": "<|fim_middle|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151661": {
      "content": "<|fim_suffix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151662": {
      "content": "<|fim_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151663": {
      "content": "<|repo_name|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151664": {
      "content": "<|file_sep|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151665": {
      "content": "<tool_response>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151666": {
      "content": "</tool_response>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151667": {
      "content": "<think>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151668": {
      "content": "</think>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    }
  },
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "bos_token": null,
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "errors": "replace",
  "extra_special_tokens": {},
  "model_max_length": 32768,
  "pad_token": "<|endoftext|>",
  "padding_side": "right",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null
}
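The settings above mean this checkpoint ships a Qwen2-style tokenizer with `<|im_end|>` as the end-of-sequence token, `<|endoftext|>` as padding, and a 32,768-token maximum length. A minimal usage sketch, assuming the `transformers` library and a local clone of this repository at `./checkpoint` (the path is illustrative):

```python
from transformers import AutoTokenizer

# Illustrative local path; any clone of this repository works the same way.
tok = AutoTokenizer.from_pretrained("./checkpoint")

print(tok.eos_token)          # <|im_end|>
print(tok.pad_token)          # <|endoftext|>
print(tok.model_max_length)   # 32768

# The bundled chat template (chat_template.jinja) is applied automatically.
messages = [{"role": "user", "content": "Hello!"}]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
```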
trainer_log.jsonl
ADDED
@@ -0,0 +1,306 @@
| 1 |
+
{"current_steps": 5, "total_steps": 1778, "loss": 0.7475, "lr": 8.98876404494382e-07, "epoch": 0.01968503937007874, "percentage": 0.28, "elapsed_time": "0:00:59", "remaining_time": "5:50:16"}
|
| 2 |
+
{"current_steps": 10, "total_steps": 1778, "loss": 0.7279, "lr": 2.02247191011236e-06, "epoch": 0.03937007874015748, "percentage": 0.56, "elapsed_time": "0:01:45", "remaining_time": "5:11:02"}
|
| 3 |
+
{"current_steps": 15, "total_steps": 1778, "loss": 0.6706, "lr": 3.146067415730337e-06, "epoch": 0.05905511811023622, "percentage": 0.84, "elapsed_time": "0:02:38", "remaining_time": "5:09:58"}
|
| 4 |
+
{"current_steps": 20, "total_steps": 1778, "loss": 0.6165, "lr": 4.269662921348315e-06, "epoch": 0.07874015748031496, "percentage": 1.12, "elapsed_time": "0:03:22", "remaining_time": "4:56:51"}
|
| 5 |
+
{"current_steps": 25, "total_steps": 1778, "loss": 0.5435, "lr": 5.393258426966292e-06, "epoch": 0.0984251968503937, "percentage": 1.41, "elapsed_time": "0:04:16", "remaining_time": "4:59:24"}
|
| 6 |
+
{"current_steps": 30, "total_steps": 1778, "loss": 0.5004, "lr": 6.51685393258427e-06, "epoch": 0.11811023622047244, "percentage": 1.69, "elapsed_time": "0:05:08", "remaining_time": "4:59:50"}
|
| 7 |
+
{"current_steps": 35, "total_steps": 1778, "loss": 0.4736, "lr": 7.640449438202247e-06, "epoch": 0.1377952755905512, "percentage": 1.97, "elapsed_time": "0:05:53", "remaining_time": "4:53:06"}
|
| 8 |
+
{"current_steps": 40, "total_steps": 1778, "loss": 0.4324, "lr": 8.764044943820226e-06, "epoch": 0.15748031496062992, "percentage": 2.25, "elapsed_time": "0:06:45", "remaining_time": "4:53:52"}
|
| 9 |
+
{"current_steps": 45, "total_steps": 1778, "loss": 0.4114, "lr": 9.887640449438202e-06, "epoch": 0.17716535433070865, "percentage": 2.53, "elapsed_time": "0:07:36", "remaining_time": "4:53:16"}
|
| 10 |
+
{"current_steps": 50, "total_steps": 1778, "loss": 0.4005, "lr": 1.101123595505618e-05, "epoch": 0.1968503937007874, "percentage": 2.81, "elapsed_time": "0:08:27", "remaining_time": "4:52:34"}
|
| 11 |
+
{"current_steps": 55, "total_steps": 1778, "loss": 0.365, "lr": 1.213483146067416e-05, "epoch": 0.21653543307086615, "percentage": 3.09, "elapsed_time": "0:09:16", "remaining_time": "4:50:40"}
|
| 12 |
+
{"current_steps": 60, "total_steps": 1778, "loss": 0.3529, "lr": 1.3258426966292135e-05, "epoch": 0.23622047244094488, "percentage": 3.37, "elapsed_time": "0:10:05", "remaining_time": "4:49:07"}
|
| 13 |
+
{"current_steps": 65, "total_steps": 1778, "loss": 0.3551, "lr": 1.4382022471910113e-05, "epoch": 0.2559055118110236, "percentage": 3.66, "elapsed_time": "0:10:50", "remaining_time": "4:45:33"}
|
| 14 |
+
{"current_steps": 70, "total_steps": 1778, "loss": 0.3442, "lr": 1.5505617977528093e-05, "epoch": 0.2755905511811024, "percentage": 3.94, "elapsed_time": "0:11:34", "remaining_time": "4:42:18"}
|
| 15 |
+
{"current_steps": 75, "total_steps": 1778, "loss": 0.3233, "lr": 1.662921348314607e-05, "epoch": 0.2952755905511811, "percentage": 4.22, "elapsed_time": "0:12:20", "remaining_time": "4:40:10"}
|
| 16 |
+
{"current_steps": 80, "total_steps": 1778, "loss": 0.3186, "lr": 1.7752808988764045e-05, "epoch": 0.31496062992125984, "percentage": 4.5, "elapsed_time": "0:13:12", "remaining_time": "4:40:26"}
|
| 17 |
+
{"current_steps": 85, "total_steps": 1778, "loss": 0.3115, "lr": 1.8876404494382024e-05, "epoch": 0.3346456692913386, "percentage": 4.78, "elapsed_time": "0:14:01", "remaining_time": "4:39:18"}
|
| 18 |
+
{"current_steps": 90, "total_steps": 1778, "loss": 0.3042, "lr": 2e-05, "epoch": 0.3543307086614173, "percentage": 5.06, "elapsed_time": "0:14:48", "remaining_time": "4:37:52"}
|
| 19 |
+
{"current_steps": 95, "total_steps": 1778, "loss": 0.3107, "lr": 2.1123595505617976e-05, "epoch": 0.37401574803149606, "percentage": 5.34, "elapsed_time": "0:15:33", "remaining_time": "4:35:36"}
|
| 20 |
+
{"current_steps": 100, "total_steps": 1778, "loss": 0.3049, "lr": 2.2247191011235958e-05, "epoch": 0.3937007874015748, "percentage": 5.62, "elapsed_time": "0:16:14", "remaining_time": "4:32:29"}
|
| 21 |
+
{"current_steps": 105, "total_steps": 1778, "loss": 0.2907, "lr": 2.3370786516853933e-05, "epoch": 0.41338582677165353, "percentage": 5.91, "elapsed_time": "0:17:02", "remaining_time": "4:31:38"}
|
| 22 |
+
{"current_steps": 110, "total_steps": 1778, "loss": 0.2891, "lr": 2.4494382022471914e-05, "epoch": 0.4330708661417323, "percentage": 6.19, "elapsed_time": "0:17:58", "remaining_time": "4:32:33"}
|
| 23 |
+
{"current_steps": 115, "total_steps": 1778, "loss": 0.2806, "lr": 2.561797752808989e-05, "epoch": 0.452755905511811, "percentage": 6.47, "elapsed_time": "0:18:55", "remaining_time": "4:33:35"}
|
| 24 |
+
{"current_steps": 120, "total_steps": 1778, "loss": 0.2962, "lr": 2.6741573033707867e-05, "epoch": 0.47244094488188976, "percentage": 6.75, "elapsed_time": "0:19:42", "remaining_time": "4:32:22"}
|
| 25 |
+
{"current_steps": 125, "total_steps": 1778, "loss": 0.2722, "lr": 2.7865168539325845e-05, "epoch": 0.4921259842519685, "percentage": 7.03, "elapsed_time": "0:20:27", "remaining_time": "4:30:38"}
|
| 26 |
+
{"current_steps": 130, "total_steps": 1778, "loss": 0.2701, "lr": 2.8988764044943823e-05, "epoch": 0.5118110236220472, "percentage": 7.31, "elapsed_time": "0:21:14", "remaining_time": "4:29:17"}
|
| 27 |
+
{"current_steps": 135, "total_steps": 1778, "loss": 0.2808, "lr": 3.0112359550561798e-05, "epoch": 0.531496062992126, "percentage": 7.59, "elapsed_time": "0:22:01", "remaining_time": "4:28:01"}
|
| 28 |
+
{"current_steps": 140, "total_steps": 1778, "loss": 0.2634, "lr": 3.123595505617978e-05, "epoch": 0.5511811023622047, "percentage": 7.87, "elapsed_time": "0:22:52", "remaining_time": "4:27:34"}
|
| 29 |
+
{"current_steps": 145, "total_steps": 1778, "loss": 0.2745, "lr": 3.235955056179776e-05, "epoch": 0.5708661417322834, "percentage": 8.16, "elapsed_time": "0:23:36", "remaining_time": "4:25:48"}
|
| 30 |
+
{"current_steps": 150, "total_steps": 1778, "loss": 0.2669, "lr": 3.3483146067415736e-05, "epoch": 0.5905511811023622, "percentage": 8.44, "elapsed_time": "0:24:27", "remaining_time": "4:25:26"}
|
| 31 |
+
{"current_steps": 155, "total_steps": 1778, "loss": 0.2702, "lr": 3.4606741573033714e-05, "epoch": 0.610236220472441, "percentage": 8.72, "elapsed_time": "0:25:15", "remaining_time": "4:24:33"}
|
| 32 |
+
{"current_steps": 160, "total_steps": 1778, "loss": 0.262, "lr": 3.5730337078651685e-05, "epoch": 0.6299212598425197, "percentage": 9.0, "elapsed_time": "0:25:59", "remaining_time": "4:22:48"}
|
| 33 |
+
{"current_steps": 165, "total_steps": 1778, "loss": 0.2688, "lr": 3.685393258426967e-05, "epoch": 0.6496062992125984, "percentage": 9.28, "elapsed_time": "0:26:44", "remaining_time": "4:21:20"}
|
| 34 |
+
{"current_steps": 170, "total_steps": 1778, "loss": 0.2609, "lr": 3.797752808988764e-05, "epoch": 0.6692913385826772, "percentage": 9.56, "elapsed_time": "0:27:24", "remaining_time": "4:19:12"}
|
| 35 |
+
{"current_steps": 175, "total_steps": 1778, "loss": 0.2599, "lr": 3.910112359550562e-05, "epoch": 0.6889763779527559, "percentage": 9.84, "elapsed_time": "0:28:05", "remaining_time": "4:17:16"}
|
| 36 |
+
{"current_steps": 180, "total_steps": 1778, "loss": 0.2671, "lr": 3.999996144687019e-05, "epoch": 0.7086614173228346, "percentage": 10.12, "elapsed_time": "0:28:50", "remaining_time": "4:16:06"}
|
| 37 |
+
{"current_steps": 185, "total_steps": 1778, "loss": 0.2643, "lr": 3.9998612102933544e-05, "epoch": 0.7283464566929134, "percentage": 10.4, "elapsed_time": "0:29:33", "remaining_time": "4:14:30"}
|
| 38 |
+
{"current_steps": 190, "total_steps": 1778, "loss": 0.2687, "lr": 3.999533525113843e-05, "epoch": 0.7480314960629921, "percentage": 10.69, "elapsed_time": "0:30:21", "remaining_time": "4:13:45"}
|
| 39 |
+
{"current_steps": 195, "total_steps": 1778, "loss": 0.2573, "lr": 3.9990131207314634e-05, "epoch": 0.7677165354330708, "percentage": 10.97, "elapsed_time": "0:31:08", "remaining_time": "4:12:50"}
|
| 40 |
+
{"current_steps": 200, "total_steps": 1778, "loss": 0.2554, "lr": 3.998300047303874e-05, "epoch": 0.7874015748031497, "percentage": 11.25, "elapsed_time": "0:31:54", "remaining_time": "4:11:42"}
|
| 41 |
+
{"current_steps": 205, "total_steps": 1778, "loss": 0.2572, "lr": 3.997394373558576e-05, "epoch": 0.8070866141732284, "percentage": 11.53, "elapsed_time": "0:32:41", "remaining_time": "4:10:47"}
|
| 42 |
+
{"current_steps": 210, "total_steps": 1778, "loss": 0.2506, "lr": 3.9962961867862894e-05, "epoch": 0.8267716535433071, "percentage": 11.81, "elapsed_time": "0:33:23", "remaining_time": "4:09:20"}
|
| 43 |
+
{"current_steps": 215, "total_steps": 1778, "loss": 0.2435, "lr": 3.995005592832541e-05, "epoch": 0.8464566929133859, "percentage": 12.09, "elapsed_time": "0:34:11", "remaining_time": "4:08:33"}
|
| 44 |
+
{"current_steps": 220, "total_steps": 1778, "loss": 0.2638, "lr": 3.993522716087462e-05, "epoch": 0.8661417322834646, "percentage": 12.37, "elapsed_time": "0:34:51", "remaining_time": "4:06:54"}
|
| 45 |
+
{"current_steps": 225, "total_steps": 1778, "loss": 0.2617, "lr": 3.991847699473801e-05, "epoch": 0.8858267716535433, "percentage": 12.65, "elapsed_time": "0:35:35", "remaining_time": "4:05:39"}
|
| 46 |
+
{"current_steps": 230, "total_steps": 1778, "loss": 0.2487, "lr": 3.989980704433144e-05, "epoch": 0.905511811023622, "percentage": 12.94, "elapsed_time": "0:36:30", "remaining_time": "4:05:41"}
|
| 47 |
+
{"current_steps": 235, "total_steps": 1778, "loss": 0.2435, "lr": 3.98792191091036e-05, "epoch": 0.9251968503937008, "percentage": 13.22, "elapsed_time": "0:37:13", "remaining_time": "4:04:25"}
|
| 48 |
+
{"current_steps": 240, "total_steps": 1778, "loss": 0.2454, "lr": 3.9856715173362527e-05, "epoch": 0.9448818897637795, "percentage": 13.5, "elapsed_time": "0:38:01", "remaining_time": "4:03:38"}
|
| 49 |
+
{"current_steps": 245, "total_steps": 1778, "loss": 0.2448, "lr": 3.9832297406084386e-05, "epoch": 0.9645669291338582, "percentage": 13.78, "elapsed_time": "0:38:53", "remaining_time": "4:03:24"}
|
| 50 |
+
{"current_steps": 250, "total_steps": 1778, "loss": 0.246, "lr": 3.980596816070442e-05, "epoch": 0.984251968503937, "percentage": 14.06, "elapsed_time": "0:39:40", "remaining_time": "4:02:31"}
|
| 51 |
+
{"current_steps": 255, "total_steps": 1778, "loss": 0.251, "lr": 3.97777299748901e-05, "epoch": 1.0039370078740157, "percentage": 14.34, "elapsed_time": "0:40:25", "remaining_time": "4:01:25"}
|
| 52 |
+
{"current_steps": 260, "total_steps": 1778, "loss": 0.235, "lr": 3.974758557029653e-05, "epoch": 1.0236220472440944, "percentage": 14.62, "elapsed_time": "0:41:11", "remaining_time": "4:00:31"}
|
| 53 |
+
{"current_steps": 265, "total_steps": 1778, "loss": 0.238, "lr": 3.971553785230418e-05, "epoch": 1.0433070866141732, "percentage": 14.9, "elapsed_time": "0:42:02", "remaining_time": "4:00:04"}
|
| 54 |
+
{"current_steps": 270, "total_steps": 1778, "loss": 0.23, "lr": 3.968158990973881e-05, "epoch": 1.0629921259842519, "percentage": 15.19, "elapsed_time": "0:42:47", "remaining_time": "3:59:00"}
|
| 55 |
+
{"current_steps": 275, "total_steps": 1778, "loss": 0.2364, "lr": 3.964574501457378e-05, "epoch": 1.0826771653543308, "percentage": 15.47, "elapsed_time": "0:43:35", "remaining_time": "3:58:16"}
|
| 56 |
+
{"current_steps": 280, "total_steps": 1778, "loss": 0.2415, "lr": 3.960800662161469e-05, "epoch": 1.1023622047244095, "percentage": 15.75, "elapsed_time": "0:44:17", "remaining_time": "3:56:58"}
|
| 57 |
+
{"current_steps": 285, "total_steps": 1778, "loss": 0.2412, "lr": 3.9568378368166406e-05, "epoch": 1.1220472440944882, "percentage": 16.03, "elapsed_time": "0:45:02", "remaining_time": "3:55:57"}
|
| 58 |
+
{"current_steps": 290, "total_steps": 1778, "loss": 0.2241, "lr": 3.952686407368247e-05, "epoch": 1.141732283464567, "percentage": 16.31, "elapsed_time": "0:45:50", "remaining_time": "3:55:11"}
|
| 59 |
+
{"current_steps": 295, "total_steps": 1778, "loss": 0.2301, "lr": 3.948346773939699e-05, "epoch": 1.1614173228346456, "percentage": 16.59, "elapsed_time": "0:46:32", "remaining_time": "3:53:58"}
|
| 60 |
+
{"current_steps": 300, "total_steps": 1778, "loss": 0.2291, "lr": 3.943819354793899e-05, "epoch": 1.1811023622047245, "percentage": 16.87, "elapsed_time": "0:47:13", "remaining_time": "3:52:42"}
|
| 61 |
+
{"current_steps": 305, "total_steps": 1778, "loss": 0.2295, "lr": 3.9391045862929275e-05, "epoch": 1.2007874015748032, "percentage": 17.15, "elapsed_time": "0:48:01", "remaining_time": "3:51:54"}
|
| 62 |
+
{"current_steps": 310, "total_steps": 1778, "loss": 0.231, "lr": 3.934202922855985e-05, "epoch": 1.220472440944882, "percentage": 17.44, "elapsed_time": "0:48:42", "remaining_time": "3:50:40"}
|
| 63 |
+
{"current_steps": 315, "total_steps": 1778, "loss": 0.2317, "lr": 3.9291148369155964e-05, "epoch": 1.2401574803149606, "percentage": 17.72, "elapsed_time": "0:49:23", "remaining_time": "3:49:24"}
|
| 64 |
+
{"current_steps": 320, "total_steps": 1778, "loss": 0.228, "lr": 3.9238408188720745e-05, "epoch": 1.2598425196850394, "percentage": 18.0, "elapsed_time": "0:50:11", "remaining_time": "3:48:43"}
|
| 65 |
+
{"current_steps": 325, "total_steps": 1778, "loss": 0.2295, "lr": 3.918381377046255e-05, "epoch": 1.279527559055118, "percentage": 18.28, "elapsed_time": "0:50:55", "remaining_time": "3:47:40"}
|
| 66 |
+
{"current_steps": 330, "total_steps": 1778, "loss": 0.2284, "lr": 3.9127370376305045e-05, "epoch": 1.2992125984251968, "percentage": 18.56, "elapsed_time": "0:51:46", "remaining_time": "3:47:11"}
|
| 67 |
+
{"current_steps": 335, "total_steps": 1778, "loss": 0.2313, "lr": 3.906908344638002e-05, "epoch": 1.3188976377952755, "percentage": 18.84, "elapsed_time": "0:52:27", "remaining_time": "3:45:56"}
|
| 68 |
+
{"current_steps": 340, "total_steps": 1778, "loss": 0.2177, "lr": 3.900895859850313e-05, "epoch": 1.3385826771653544, "percentage": 19.12, "elapsed_time": "0:53:12", "remaining_time": "3:45:01"}
|
| 69 |
+
{"current_steps": 345, "total_steps": 1778, "loss": 0.2347, "lr": 3.8947001627632326e-05, "epoch": 1.358267716535433, "percentage": 19.4, "elapsed_time": "0:53:56", "remaining_time": "3:44:02"}
|
| 70 |
+
{"current_steps": 350, "total_steps": 1778, "loss": 0.228, "lr": 3.888321850530943e-05, "epoch": 1.3779527559055118, "percentage": 19.69, "elapsed_time": "0:54:50", "remaining_time": "3:43:44"}
|
| 71 |
+
{"current_steps": 355, "total_steps": 1778, "loss": 0.2347, "lr": 3.8817615379084514e-05, "epoch": 1.3976377952755905, "percentage": 19.97, "elapsed_time": "0:55:31", "remaining_time": "3:42:35"}
|
| 72 |
+
{"current_steps": 360, "total_steps": 1778, "loss": 0.2254, "lr": 3.875019857192343e-05, "epoch": 1.4173228346456692, "percentage": 20.25, "elapsed_time": "0:56:10", "remaining_time": "3:41:14"}
|
| 73 |
+
{"current_steps": 365, "total_steps": 1778, "loss": 0.2292, "lr": 3.8680974581598375e-05, "epoch": 1.4370078740157481, "percentage": 20.53, "elapsed_time": "0:56:56", "remaining_time": "3:40:26"}
|
| 74 |
+
{"current_steps": 370, "total_steps": 1778, "loss": 0.228, "lr": 3.860995008006161e-05, "epoch": 1.4566929133858268, "percentage": 20.81, "elapsed_time": "0:57:41", "remaining_time": "3:39:32"}
|
| 75 |
+
{"current_steps": 375, "total_steps": 1778, "loss": 0.2268, "lr": 3.853713191280242e-05, "epoch": 1.4763779527559056, "percentage": 21.09, "elapsed_time": "0:58:21", "remaining_time": "3:38:20"}
|
| 76 |
+
{"current_steps": 380, "total_steps": 1778, "loss": 0.2255, "lr": 3.846252709818733e-05, "epoch": 1.4960629921259843, "percentage": 21.37, "elapsed_time": "0:59:01", "remaining_time": "3:37:08"}
|
| 77 |
+
{"current_steps": 385, "total_steps": 1778, "loss": 0.225, "lr": 3.8386142826783645e-05, "epoch": 1.515748031496063, "percentage": 21.65, "elapsed_time": "0:59:47", "remaining_time": "3:36:21"}
|
| 78 |
+
{"current_steps": 390, "total_steps": 1778, "loss": 0.2239, "lr": 3.830798646066642e-05, "epoch": 1.5354330708661417, "percentage": 21.93, "elapsed_time": "1:00:44", "remaining_time": "3:36:11"}
|
| 79 |
+
{"current_steps": 395, "total_steps": 1778, "loss": 0.2211, "lr": 3.8228065532708905e-05, "epoch": 1.5551181102362204, "percentage": 22.22, "elapsed_time": "1:01:31", "remaining_time": "3:35:26"}
|
| 80 |
+
{"current_steps": 400, "total_steps": 1778, "loss": 0.2215, "lr": 3.814638774585648e-05, "epoch": 1.574803149606299, "percentage": 22.5, "elapsed_time": "1:02:11", "remaining_time": "3:34:14"}
|
| 81 |
+
{"current_steps": 405, "total_steps": 1778, "loss": 0.2339, "lr": 3.8062960972384223e-05, "epoch": 1.594488188976378, "percentage": 22.78, "elapsed_time": "1:03:00", "remaining_time": "3:33:37"}
|
| 82 |
+
{"current_steps": 410, "total_steps": 1778, "loss": 0.2194, "lr": 3.797779325313822e-05, "epoch": 1.6141732283464567, "percentage": 23.06, "elapsed_time": "1:03:46", "remaining_time": "3:32:47"}
|
| 83 |
+
{"current_steps": 415, "total_steps": 1778, "loss": 0.2261, "lr": 3.78908927967605e-05, "epoch": 1.6338582677165354, "percentage": 23.34, "elapsed_time": "1:04:39", "remaining_time": "3:32:20"}
|
| 84 |
+
{"current_steps": 420, "total_steps": 1778, "loss": 0.2279, "lr": 3.780226797889794e-05, "epoch": 1.6535433070866141, "percentage": 23.62, "elapsed_time": "1:05:19", "remaining_time": "3:31:12"}
|
| 85 |
+
{"current_steps": 425, "total_steps": 1778, "loss": 0.2264, "lr": 3.7711927341394916e-05, "epoch": 1.673228346456693, "percentage": 23.9, "elapsed_time": "1:06:03", "remaining_time": "3:30:18"}
|
| 86 |
+
{"current_steps": 430, "total_steps": 1778, "loss": 0.227, "lr": 3.761987959147012e-05, "epoch": 1.6929133858267718, "percentage": 24.18, "elapsed_time": "1:06:45", "remaining_time": "3:29:16"}
|
| 87 |
+
{"current_steps": 435, "total_steps": 1778, "loss": 0.2231, "lr": 3.7526133600877275e-05, "epoch": 1.7125984251968505, "percentage": 24.47, "elapsed_time": "1:07:39", "remaining_time": "3:28:54"}
|
| 88 |
+
{"current_steps": 440, "total_steps": 1778, "loss": 0.2237, "lr": 3.743069840505006e-05, "epoch": 1.7322834645669292, "percentage": 24.75, "elapsed_time": "1:08:26", "remaining_time": "3:28:06"}
|
| 89 |
+
{"current_steps": 445, "total_steps": 1778, "loss": 0.2219, "lr": 3.733358320223128e-05, "epoch": 1.7519685039370079, "percentage": 25.03, "elapsed_time": "1:09:19", "remaining_time": "3:27:40"}
|
| 90 |
+
{"current_steps": 450, "total_steps": 1778, "loss": 0.2207, "lr": 3.723479735258633e-05, "epoch": 1.7716535433070866, "percentage": 25.31, "elapsed_time": "1:10:09", "remaining_time": "3:27:03"}
|
| 91 |
+
{"current_steps": 455, "total_steps": 1778, "loss": 0.2159, "lr": 3.7134350377301e-05, "epoch": 1.7913385826771653, "percentage": 25.59, "elapsed_time": "1:10:53", "remaining_time": "3:26:08"}
|
| 92 |
+
{"current_steps": 460, "total_steps": 1778, "loss": 0.2145, "lr": 3.703225195766382e-05, "epoch": 1.811023622047244, "percentage": 25.87, "elapsed_time": "1:11:36", "remaining_time": "3:25:10"}
|
| 93 |
+
{"current_steps": 465, "total_steps": 1778, "loss": 0.2195, "lr": 3.692851193413299e-05, "epoch": 1.8307086614173227, "percentage": 26.15, "elapsed_time": "1:12:18", "remaining_time": "3:24:11"}
|
| 94 |
+
{"current_steps": 470, "total_steps": 1778, "loss": 0.2226, "lr": 3.682314030538788e-05, "epoch": 1.8503937007874016, "percentage": 26.43, "elapsed_time": "1:12:57", "remaining_time": "3:23:01"}
|
| 95 |
+
{"current_steps": 475, "total_steps": 1778, "loss": 0.2275, "lr": 3.671614722736541e-05, "epoch": 1.8700787401574803, "percentage": 26.72, "elapsed_time": "1:13:43", "remaining_time": "3:22:14"}
|
| 96 |
+
{"current_steps": 480, "total_steps": 1778, "loss": 0.2196, "lr": 3.6607543012281106e-05, "epoch": 1.889763779527559, "percentage": 27.0, "elapsed_time": "1:14:19", "remaining_time": "3:20:59"}
|
| 97 |
+
{"current_steps": 485, "total_steps": 1778, "loss": 0.2159, "lr": 3.649733812763527e-05, "epoch": 1.909448818897638, "percentage": 27.28, "elapsed_time": "1:15:02", "remaining_time": "3:20:04"}
|
| 98 |
+
{"current_steps": 490, "total_steps": 1778, "loss": 0.2216, "lr": 3.638554319520406e-05, "epoch": 1.9291338582677167, "percentage": 27.56, "elapsed_time": "1:15:48", "remaining_time": "3:19:16"}
|
| 99 |
+
{"current_steps": 495, "total_steps": 1778, "loss": 0.2232, "lr": 3.627216899001575e-05, "epoch": 1.9488188976377954, "percentage": 27.84, "elapsed_time": "1:16:31", "remaining_time": "3:18:21"}
|
| 100 |
+
{"current_steps": 500, "total_steps": 1778, "loss": 0.2224, "lr": 3.6157226439312186e-05, "epoch": 1.968503937007874, "percentage": 28.12, "elapsed_time": "1:17:11", "remaining_time": "3:17:19"}
|
| 101 |
+
{"current_steps": 505, "total_steps": 1778, "loss": 0.2198, "lr": 3.604072662149567e-05, "epoch": 1.9881889763779528, "percentage": 28.4, "elapsed_time": "1:18:05", "remaining_time": "3:16:52"}
|
| 102 |
+
{"current_steps": 510, "total_steps": 1778, "loss": 0.2095, "lr": 3.5922680765061096e-05, "epoch": 2.0078740157480315, "percentage": 28.68, "elapsed_time": "1:18:56", "remaining_time": "3:16:16"}
|
| 103 |
+
{"current_steps": 515, "total_steps": 1778, "loss": 0.2057, "lr": 3.580310024751381e-05, "epoch": 2.02755905511811, "percentage": 28.97, "elapsed_time": "1:19:42", "remaining_time": "3:15:27"}
|
| 104 |
+
{"current_steps": 520, "total_steps": 1778, "loss": 0.2092, "lr": 3.568199659427298e-05, "epoch": 2.047244094488189, "percentage": 29.25, "elapsed_time": "1:20:26", "remaining_time": "3:14:36"}
|
| 105 |
+
{"current_steps": 525, "total_steps": 1778, "loss": 0.2056, "lr": 3.555938147756077e-05, "epoch": 2.0669291338582676, "percentage": 29.53, "elapsed_time": "1:21:08", "remaining_time": "3:13:40"}
|
| 106 |
+
{"current_steps": 530, "total_steps": 1778, "loss": 0.206, "lr": 3.543526671527733e-05, "epoch": 2.0866141732283463, "percentage": 29.81, "elapsed_time": "1:21:48", "remaining_time": "3:12:37"}
|
| 107 |
+
{"current_steps": 535, "total_steps": 1778, "loss": 0.205, "lr": 3.530966426986177e-05, "epoch": 2.106299212598425, "percentage": 30.09, "elapsed_time": "1:22:37", "remaining_time": "3:11:58"}
|
| 108 |
+
{"current_steps": 540, "total_steps": 1778, "loss": 0.2088, "lr": 3.51825862471392e-05, "epoch": 2.1259842519685037, "percentage": 30.37, "elapsed_time": "1:23:20", "remaining_time": "3:11:04"}
|
| 109 |
+
{"current_steps": 545, "total_steps": 1778, "loss": 0.1971, "lr": 3.505404489515394e-05, "epoch": 2.145669291338583, "percentage": 30.65, "elapsed_time": "1:24:03", "remaining_time": "3:10:11"}
|
| 110 |
+
{"current_steps": 550, "total_steps": 1778, "loss": 0.2093, "lr": 3.492405260298905e-05, "epoch": 2.1653543307086616, "percentage": 30.93, "elapsed_time": "1:24:51", "remaining_time": "3:09:28"}
|
| 111 |
+
{"current_steps": 555, "total_steps": 1778, "loss": 0.2076, "lr": 3.47926218995722e-05, "epoch": 2.1850393700787403, "percentage": 31.21, "elapsed_time": "1:25:25", "remaining_time": "3:08:14"}
|
| 112 |
+
{"current_steps": 560, "total_steps": 1778, "loss": 0.2046, "lr": 3.465976545246813e-05, "epoch": 2.204724409448819, "percentage": 31.5, "elapsed_time": "1:26:09", "remaining_time": "3:07:22"}
|
| 113 |
+
{"current_steps": 565, "total_steps": 1778, "loss": 0.2046, "lr": 3.4525496066657735e-05, "epoch": 2.2244094488188977, "percentage": 31.78, "elapsed_time": "1:26:53", "remaining_time": "3:06:32"}
|
| 114 |
+
{"current_steps": 570, "total_steps": 1778, "loss": 0.1999, "lr": 3.438982668330388e-05, "epoch": 2.2440944881889764, "percentage": 32.06, "elapsed_time": "1:27:39", "remaining_time": "3:05:47"}
|
| 115 |
+
{"current_steps": 575, "total_steps": 1778, "loss": 0.2142, "lr": 3.425277037850411e-05, "epoch": 2.263779527559055, "percentage": 32.34, "elapsed_time": "1:28:22", "remaining_time": "3:04:53"}
|
| 116 |
+
{"current_steps": 580, "total_steps": 1778, "loss": 0.2057, "lr": 3.411434036203035e-05, "epoch": 2.283464566929134, "percentage": 32.62, "elapsed_time": "1:29:02", "remaining_time": "3:03:54"}
|
| 117 |
+
{"current_steps": 585, "total_steps": 1778, "loss": 0.2116, "lr": 3.397454997605569e-05, "epoch": 2.3031496062992125, "percentage": 32.9, "elapsed_time": "1:29:48", "remaining_time": "3:03:08"}
|
| 118 |
+
{"current_steps": 590, "total_steps": 1778, "loss": 0.2004, "lr": 3.38334126938685e-05, "epoch": 2.322834645669291, "percentage": 33.18, "elapsed_time": "1:30:33", "remaining_time": "3:02:21"}
|
| 119 |
+
{"current_steps": 595, "total_steps": 1778, "loss": 0.1973, "lr": 3.369094211857378e-05, "epoch": 2.34251968503937, "percentage": 33.46, "elapsed_time": "1:31:18", "remaining_time": "3:01:32"}
|
| 120 |
+
{"current_steps": 600, "total_steps": 1778, "loss": 0.2044, "lr": 3.354715198178211e-05, "epoch": 2.362204724409449, "percentage": 33.75, "elapsed_time": "1:32:10", "remaining_time": "3:00:57"}
|
| 121 |
+
{"current_steps": 605, "total_steps": 1778, "loss": 0.208, "lr": 3.3402056142286156e-05, "epoch": 2.3818897637795278, "percentage": 34.03, "elapsed_time": "1:32:49", "remaining_time": "2:59:59"}
|
| 122 |
+
{"current_steps": 610, "total_steps": 1778, "loss": 0.2038, "lr": 3.3255668584724916e-05, "epoch": 2.4015748031496065, "percentage": 34.31, "elapsed_time": "1:33:38", "remaining_time": "2:59:18"}
|
| 123 |
+
{"current_steps": 615, "total_steps": 1778, "loss": 0.2071, "lr": 3.310800341823588e-05, "epoch": 2.421259842519685, "percentage": 34.59, "elapsed_time": "1:34:24", "remaining_time": "2:58:31"}
|
| 124 |
+
{"current_steps": 620, "total_steps": 1778, "loss": 0.2023, "lr": 3.2959074875095125e-05, "epoch": 2.440944881889764, "percentage": 34.87, "elapsed_time": "1:35:10", "remaining_time": "2:57:46"}
|
| 125 |
+
{"current_steps": 625, "total_steps": 1778, "loss": 0.2068, "lr": 3.280889730934562e-05, "epoch": 2.4606299212598426, "percentage": 35.15, "elapsed_time": "1:35:59", "remaining_time": "2:57:04"}
|
| 126 |
+
{"current_steps": 630, "total_steps": 1778, "loss": 0.2067, "lr": 3.265748519541372e-05, "epoch": 2.4803149606299213, "percentage": 35.43, "elapsed_time": "1:36:43", "remaining_time": "2:56:15"}
|
| 127 |
+
{"current_steps": 635, "total_steps": 1778, "loss": 0.2024, "lr": 3.250485312671411e-05, "epoch": 2.5, "percentage": 35.71, "elapsed_time": "1:37:27", "remaining_time": "2:55:24"}
|
| 128 |
+
{"current_steps": 640, "total_steps": 1778, "loss": 0.2058, "lr": 3.2351015814243235e-05, "epoch": 2.5196850393700787, "percentage": 36.0, "elapsed_time": "1:38:15", "remaining_time": "2:54:42"}
|
| 129 |
+
{"current_steps": 645, "total_steps": 1778, "loss": 0.2006, "lr": 3.219598808516148e-05, "epoch": 2.5393700787401574, "percentage": 36.28, "elapsed_time": "1:38:56", "remaining_time": "2:53:48"}
|
| 130 |
+
{"current_steps": 650, "total_steps": 1778, "loss": 0.2008, "lr": 3.203978488136403e-05, "epoch": 2.559055118110236, "percentage": 36.56, "elapsed_time": "1:39:36", "remaining_time": "2:52:51"}
|
| 131 |
+
{"current_steps": 655, "total_steps": 1778, "loss": 0.1947, "lr": 3.188242125804078e-05, "epoch": 2.578740157480315, "percentage": 36.84, "elapsed_time": "1:40:28", "remaining_time": "2:52:15"}
|
| 132 |
+
{"current_steps": 660, "total_steps": 1778, "loss": 0.197, "lr": 3.1723912382225267e-05, "epoch": 2.5984251968503935, "percentage": 37.12, "elapsed_time": "1:41:11", "remaining_time": "2:51:25"}
|
| 133 |
+
{"current_steps": 665, "total_steps": 1778, "loss": 0.2026, "lr": 3.156427353133286e-05, "epoch": 2.6181102362204722, "percentage": 37.4, "elapsed_time": "1:41:55", "remaining_time": "2:50:34"}
|
| 134 |
+
{"current_steps": 670, "total_steps": 1778, "loss": 0.1994, "lr": 3.140352009168828e-05, "epoch": 2.637795275590551, "percentage": 37.68, "elapsed_time": "1:42:48", "remaining_time": "2:50:01"}
|
| 135 |
+
{"current_steps": 675, "total_steps": 1778, "loss": 0.1974, "lr": 3.124166755704261e-05, "epoch": 2.65748031496063, "percentage": 37.96, "elapsed_time": "1:43:34", "remaining_time": "2:49:14"}
|
| 136 |
+
{"current_steps": 680, "total_steps": 1778, "loss": 0.2072, "lr": 3.1078731527080023e-05, "epoch": 2.677165354330709, "percentage": 38.25, "elapsed_time": "1:44:16", "remaining_time": "2:48:22"}
|
| 137 |
+
{"current_steps": 685, "total_steps": 1778, "loss": 0.1985, "lr": 3.09147277059142e-05, "epoch": 2.6968503937007875, "percentage": 38.53, "elapsed_time": "1:45:03", "remaining_time": "2:47:37"}
|
| 138 |
+
{"current_steps": 690, "total_steps": 1778, "loss": 0.2005, "lr": 3.074967190057478e-05, "epoch": 2.716535433070866, "percentage": 38.81, "elapsed_time": "1:45:59", "remaining_time": "2:47:06"}
|
| 139 |
+
{"current_steps": 695, "total_steps": 1778, "loss": 0.1964, "lr": 3.058358001948381e-05, "epoch": 2.736220472440945, "percentage": 39.09, "elapsed_time": "1:46:46", "remaining_time": "2:46:22"}
|
| 140 |
+
{"current_steps": 700, "total_steps": 1778, "loss": 0.2109, "lr": 3.0416468070922504e-05, "epoch": 2.7559055118110236, "percentage": 39.37, "elapsed_time": "1:47:33", "remaining_time": "2:45:38"}
|
| 141 |
+
{"current_steps": 705, "total_steps": 1778, "loss": 0.2076, "lr": 3.0248352161488267e-05, "epoch": 2.7755905511811023, "percentage": 39.65, "elapsed_time": "1:48:17", "remaining_time": "2:44:49"}
|
| 142 |
+
{"current_steps": 710, "total_steps": 1778, "loss": 0.2039, "lr": 3.007924849454235e-05, "epoch": 2.795275590551181, "percentage": 39.93, "elapsed_time": "1:49:08", "remaining_time": "2:44:10"}
|
| 143 |
+
{"current_steps": 715, "total_steps": 1778, "loss": 0.2017, "lr": 2.9909173368648154e-05, "epoch": 2.8149606299212597, "percentage": 40.21, "elapsed_time": "1:49:51", "remaining_time": "2:43:18"}
|
| 144 |
+
{"current_steps": 720, "total_steps": 1778, "loss": 0.2026, "lr": 2.9738143176000287e-05, "epoch": 2.8346456692913384, "percentage": 40.49, "elapsed_time": "1:50:30", "remaining_time": "2:42:23"}
|
| 145 |
+
{"current_steps": 725, "total_steps": 1778, "loss": 0.2054, "lr": 2.9566174400844692e-05, "epoch": 2.8543307086614176, "percentage": 40.78, "elapsed_time": "1:51:07", "remaining_time": "2:41:24"}
|
| 146 |
+
{"current_steps": 730, "total_steps": 1778, "loss": 0.199, "lr": 2.9393283617889846e-05, "epoch": 2.8740157480314963, "percentage": 41.06, "elapsed_time": "1:51:50", "remaining_time": "2:40:33"}
|
| 147 |
+
{"current_steps": 735, "total_steps": 1778, "loss": 0.2079, "lr": 2.921948749070925e-05, "epoch": 2.893700787401575, "percentage": 41.34, "elapsed_time": "1:52:41", "remaining_time": "2:39:54"}
|
| 148 |
+
{"current_steps": 740, "total_steps": 1778, "loss": 0.2058, "lr": 2.9044802770135375e-05, "epoch": 2.9133858267716537, "percentage": 41.62, "elapsed_time": "1:53:23", "remaining_time": "2:39:03"}
|
| 149 |
+
{"current_steps": 745, "total_steps": 1778, "loss": 0.201, "lr": 2.886924629264517e-05, "epoch": 2.9330708661417324, "percentage": 41.9, "elapsed_time": "1:54:06", "remaining_time": "2:38:13"}
|
| 150 |
+
{"current_steps": 750, "total_steps": 1778, "loss": 0.2007, "lr": 2.8692834978737328e-05, "epoch": 2.952755905511811, "percentage": 42.18, "elapsed_time": "1:54:47", "remaining_time": "2:37:20"}
|
| 151 |
+
{"current_steps": 755, "total_steps": 1778, "loss": 0.2058, "lr": 2.8515585831301456e-05, "epoch": 2.97244094488189, "percentage": 42.46, "elapsed_time": "1:55:29", "remaining_time": "2:36:29"}
|
| 152 |
+
{"current_steps": 760, "total_steps": 1778, "loss": 0.1944, "lr": 2.83375159339793e-05, "epoch": 2.9921259842519685, "percentage": 42.74, "elapsed_time": "1:56:18", "remaining_time": "2:35:47"}
|
| 153 |
+
{"current_steps": 765, "total_steps": 1778, "loss": 0.192, "lr": 2.8158642449518186e-05, "epoch": 3.0118110236220472, "percentage": 43.03, "elapsed_time": "1:57:06", "remaining_time": "2:35:04"}
|
| 154 |
+
{"current_steps": 770, "total_steps": 1778, "loss": 0.1834, "lr": 2.797898261811685e-05, "epoch": 3.031496062992126, "percentage": 43.31, "elapsed_time": "1:57:50", "remaining_time": "2:34:16"}
|
| 155 |
+
{"current_steps": 775, "total_steps": 1778, "loss": 0.1902, "lr": 2.7798553755763768e-05, "epoch": 3.0511811023622046, "percentage": 43.59, "elapsed_time": "1:58:36", "remaining_time": "2:33:30"}
|
| 156 |
+
{"current_steps": 780, "total_steps": 1778, "loss": 0.1828, "lr": 2.7617373252568237e-05, "epoch": 3.0708661417322833, "percentage": 43.87, "elapsed_time": "1:59:17", "remaining_time": "2:32:37"}
|
| 157 |
+
{"current_steps": 785, "total_steps": 1778, "loss": 0.1829, "lr": 2.7435458571084247e-05, "epoch": 3.090551181102362, "percentage": 44.15, "elapsed_time": "1:59:55", "remaining_time": "2:31:41"}
|
| 158 |
+
{"current_steps": 790, "total_steps": 1778, "loss": 0.1877, "lr": 2.725282724462743e-05, "epoch": 3.1102362204724407, "percentage": 44.43, "elapsed_time": "2:00:37", "remaining_time": "2:30:51"}
|
| 159 |
+
{"current_steps": 795, "total_steps": 1778, "loss": 0.1851, "lr": 2.7069496875585145e-05, "epoch": 3.1299212598425195, "percentage": 44.71, "elapsed_time": "2:01:17", "remaining_time": "2:29:59"}
|
| 160 |
+
{"current_steps": 800, "total_steps": 1778, "loss": 0.185, "lr": 2.688548513371994e-05, "epoch": 3.1496062992125986, "percentage": 44.99, "elapsed_time": "2:02:02", "remaining_time": "2:29:11"}
|
| 161 |
+
{"current_steps": 805, "total_steps": 1778, "loss": 0.1913, "lr": 2.670080975446648e-05, "epoch": 3.1692913385826773, "percentage": 45.28, "elapsed_time": "2:02:48", "remaining_time": "2:28:25"}
|
| 162 |
+
{"current_steps": 810, "total_steps": 1778, "loss": 0.1861, "lr": 2.6515488537222198e-05, "epoch": 3.188976377952756, "percentage": 45.56, "elapsed_time": "2:03:31", "remaining_time": "2:27:37"}
|
| 163 |
+
{"current_steps": 815, "total_steps": 1778, "loss": 0.1877, "lr": 2.6329539343631725e-05, "epoch": 3.2086614173228347, "percentage": 45.84, "elapsed_time": "2:04:25", "remaining_time": "2:27:00"}
|
| 164 |
+
{"current_steps": 820, "total_steps": 1778, "loss": 0.1875, "lr": 2.614298009586536e-05, "epoch": 3.2283464566929134, "percentage": 46.12, "elapsed_time": "2:05:08", "remaining_time": "2:26:11"}
|
| 165 |
+
{"current_steps": 825, "total_steps": 1778, "loss": 0.1932, "lr": 2.595582877489171e-05, "epoch": 3.248031496062992, "percentage": 46.4, "elapsed_time": "2:05:59", "remaining_time": "2:25:32"}
|
| 166 |
+
{"current_steps": 830, "total_steps": 1778, "loss": 0.1923, "lr": 2.57681034187446e-05, "epoch": 3.267716535433071, "percentage": 46.68, "elapsed_time": "2:06:48", "remaining_time": "2:24:49"}
|
| 167 |
+
{"current_steps": 835, "total_steps": 1778, "loss": 0.1853, "lr": 2.557982212078459e-05, "epoch": 3.2874015748031495, "percentage": 46.96, "elapsed_time": "2:07:29", "remaining_time": "2:23:58"}
|
| 168 |
+
{"current_steps": 840, "total_steps": 1778, "loss": 0.1878, "lr": 2.5391003027955045e-05, "epoch": 3.3070866141732282, "percentage": 47.24, "elapsed_time": "2:08:11", "remaining_time": "2:23:09"}
|
| 169 |
+
{"current_steps": 845, "total_steps": 1778, "loss": 0.1867, "lr": 2.5201664339033138e-05, "epoch": 3.326771653543307, "percentage": 47.53, "elapsed_time": "2:08:54", "remaining_time": "2:22:19"}
|
| 170 |
+
{"current_steps": 850, "total_steps": 1778, "loss": 0.1802, "lr": 2.501182430287578e-05, "epoch": 3.3464566929133857, "percentage": 47.81, "elapsed_time": "2:09:34", "remaining_time": "2:21:28"}
|
| 171 |
+
{"current_steps": 855, "total_steps": 1778, "loss": 0.184, "lr": 2.4821501216660778e-05, "epoch": 3.366141732283465, "percentage": 48.09, "elapsed_time": "2:10:24", "remaining_time": "2:20:46"}
|
| 172 |
+
{"current_steps": 860, "total_steps": 1778, "loss": 0.1887, "lr": 2.4630713424123315e-05, "epoch": 3.3858267716535435, "percentage": 48.37, "elapsed_time": "2:11:11", "remaining_time": "2:20:02"}
|
| 173 |
+
{"current_steps": 865, "total_steps": 1778, "loss": 0.1935, "lr": 2.443947931378792e-05, "epoch": 3.405511811023622, "percentage": 48.65, "elapsed_time": "2:12:00", "remaining_time": "2:19:20"}
|
| 174 |
+
{"current_steps": 870, "total_steps": 1778, "loss": 0.1857, "lr": 2.4247817317196188e-05, "epoch": 3.425196850393701, "percentage": 48.93, "elapsed_time": "2:12:46", "remaining_time": "2:18:34"}
|
| 175 |
+
{"current_steps": 875, "total_steps": 1778, "loss": 0.1842, "lr": 2.405574590713025e-05, "epoch": 3.4448818897637796, "percentage": 49.21, "elapsed_time": "2:13:26", "remaining_time": "2:17:42"}
|
| 176 |
+
{"current_steps": 880, "total_steps": 1778, "loss": 0.1837, "lr": 2.3863283595832387e-05, "epoch": 3.4645669291338583, "percentage": 49.49, "elapsed_time": "2:14:06", "remaining_time": "2:16:50"}
|
| 177 |
+
{"current_steps": 885, "total_steps": 1778, "loss": 0.192, "lr": 2.3670448933220732e-05, "epoch": 3.484251968503937, "percentage": 49.78, "elapsed_time": "2:14:52", "remaining_time": "2:16:05"}
|
| 178 |
+
{"current_steps": 890, "total_steps": 1778, "loss": 0.1888, "lr": 2.3477260505101427e-05, "epoch": 3.5039370078740157, "percentage": 50.06, "elapsed_time": "2:15:37", "remaining_time": "2:15:19"}
|
| 179 |
+
{"current_steps": 895, "total_steps": 1778, "loss": 0.1815, "lr": 2.328373693137726e-05, "epoch": 3.5236220472440944, "percentage": 50.34, "elapsed_time": "2:16:23", "remaining_time": "2:14:33"}
|
| 180 |
+
{"current_steps": 900, "total_steps": 1778, "loss": 0.1848, "lr": 2.3089896864253066e-05, "epoch": 3.543307086614173, "percentage": 50.62, "elapsed_time": "2:17:06", "remaining_time": "2:13:45"}
|
| 181 |
+
{"current_steps": 905, "total_steps": 1778, "loss": 0.1871, "lr": 2.289575898643796e-05, "epoch": 3.562992125984252, "percentage": 50.9, "elapsed_time": "2:17:49", "remaining_time": "2:12:56"}
|
| 182 |
+
{"current_steps": 910, "total_steps": 1778, "loss": 0.191, "lr": 2.270134200934466e-05, "epoch": 3.5826771653543306, "percentage": 51.18, "elapsed_time": "2:18:34", "remaining_time": "2:12:10"}
|
| 183 |
+
{"current_steps": 915, "total_steps": 1778, "loss": 0.1928, "lr": 2.2506664671286087e-05, "epoch": 3.6023622047244093, "percentage": 51.46, "elapsed_time": "2:19:22", "remaining_time": "2:11:26"}
|
| 184 |
+
{"current_steps": 920, "total_steps": 1778, "loss": 0.1893, "lr": 2.2311745735669258e-05, "epoch": 3.622047244094488, "percentage": 51.74, "elapsed_time": "2:20:06", "remaining_time": "2:10:39"}
|
| 185 |
+
{"current_steps": 925, "total_steps": 1778, "loss": 0.1921, "lr": 2.2116603989186895e-05, "epoch": 3.6417322834645667, "percentage": 52.02, "elapsed_time": "2:20:46", "remaining_time": "2:09:48"}
|
| 186 |
+
{"current_steps": 930, "total_steps": 1778, "loss": 0.183, "lr": 2.192125824000667e-05, "epoch": 3.661417322834646, "percentage": 52.31, "elapsed_time": "2:21:31", "remaining_time": "2:09:02"}
|
| 187 |
+
{"current_steps": 935, "total_steps": 1778, "loss": 0.1866, "lr": 2.1725727315958473e-05, "epoch": 3.6811023622047245, "percentage": 52.59, "elapsed_time": "2:22:27", "remaining_time": "2:08:26"}
|
| 188 |
+
{"current_steps": 940, "total_steps": 1778, "loss": 0.1861, "lr": 2.1530030062719724e-05, "epoch": 3.7007874015748032, "percentage": 52.87, "elapsed_time": "2:23:13", "remaining_time": "2:07:40"}
|
| 189 |
+
{"current_steps": 945, "total_steps": 1778, "loss": 0.1761, "lr": 2.1334185341999024e-05, "epoch": 3.720472440944882, "percentage": 53.15, "elapsed_time": "2:24:02", "remaining_time": "2:06:57"}
|
| 190 |
+
{"current_steps": 950, "total_steps": 1778, "loss": 0.1918, "lr": 2.1138212029718158e-05, "epoch": 3.7401574803149606, "percentage": 53.43, "elapsed_time": "2:24:41", "remaining_time": "2:06:06"}
|
| 191 |
+
{"current_steps": 955, "total_steps": 1778, "loss": 0.192, "lr": 2.0942129014192854e-05, "epoch": 3.7598425196850394, "percentage": 53.71, "elapsed_time": "2:25:29", "remaining_time": "2:05:22"}
|
| 192 |
+
{"current_steps": 960, "total_steps": 1778, "loss": 0.187, "lr": 2.0745955194312276e-05, "epoch": 3.779527559055118, "percentage": 53.99, "elapsed_time": "2:26:12", "remaining_time": "2:04:35"}
|
| 193 |
+
{"current_steps": 965, "total_steps": 1778, "loss": 0.1837, "lr": 2.054970947771747e-05, "epoch": 3.7992125984251968, "percentage": 54.27, "elapsed_time": "2:27:00", "remaining_time": "2:03:50"}
|
| 194 |
+
{"current_steps": 970, "total_steps": 1778, "loss": 0.188, "lr": 2.0353410778979076e-05, "epoch": 3.8188976377952755, "percentage": 54.56, "elapsed_time": "2:27:50", "remaining_time": "2:03:09"}
|
| 195 |
+
{"current_steps": 975, "total_steps": 1778, "loss": 0.1872, "lr": 2.0157078017774228e-05, "epoch": 3.838582677165354, "percentage": 54.84, "elapsed_time": "2:28:33", "remaining_time": "2:02:20"}
|
| 196 |
+
{"current_steps": 980, "total_steps": 1778, "loss": 0.1842, "lr": 1.99607301170631e-05, "epoch": 3.8582677165354333, "percentage": 55.12, "elapsed_time": "2:29:19", "remaining_time": "2:01:35"}
|
| 197 |
+
{"current_steps": 985, "total_steps": 1778, "loss": 0.194, "lr": 1.9764386001265015e-05, "epoch": 3.877952755905512, "percentage": 55.4, "elapsed_time": "2:29:59", "remaining_time": "2:00:45"}
|
| 198 |
+
{"current_steps": 990, "total_steps": 1778, "loss": 0.1827, "lr": 1.956806459443453e-05, "epoch": 3.8976377952755907, "percentage": 55.68, "elapsed_time": "2:30:50", "remaining_time": "2:00:04"}
|
| 199 |
+
{"current_steps": 995, "total_steps": 1778, "loss": 0.189, "lr": 1.9371784818437436e-05, "epoch": 3.9173228346456694, "percentage": 55.96, "elapsed_time": "2:31:30", "remaining_time": "1:59:13"}
|
| 200 |
+
{"current_steps": 1000, "total_steps": 1778, "loss": 0.1887, "lr": 1.9175565591127073e-05, "epoch": 3.937007874015748, "percentage": 56.24, "elapsed_time": "2:32:21", "remaining_time": "1:58:32"}
|
| 201 |
+
{"current_steps": 1005, "total_steps": 1778, "loss": 0.1865, "lr": 1.897942582452097e-05, "epoch": 3.956692913385827, "percentage": 56.52, "elapsed_time": "2:33:03", "remaining_time": "1:57:43"}
|
| 202 |
+
{"current_steps": 1010, "total_steps": 1778, "loss": 0.1896, "lr": 1.8783384422978066e-05, "epoch": 3.9763779527559056, "percentage": 56.81, "elapsed_time": "2:33:49", "remaining_time": "1:56:57"}
|
| 203 |
+
{"current_steps": 1015, "total_steps": 1778, "loss": 0.1913, "lr": 1.8587460281376673e-05, "epoch": 3.9960629921259843, "percentage": 57.09, "elapsed_time": "2:34:27", "remaining_time": "1:56:06"}
|
| 204 |
+
{"current_steps": 1020, "total_steps": 1778, "loss": 0.1747, "lr": 1.8391672283293333e-05, "epoch": 4.015748031496063, "percentage": 57.37, "elapsed_time": "2:35:09", "remaining_time": "1:55:18"}
|
| 205 |
+
{"current_steps": 1025, "total_steps": 1778, "loss": 0.1648, "lr": 1.8196039299182818e-05, "epoch": 4.035433070866142, "percentage": 57.65, "elapsed_time": "2:35:56", "remaining_time": "1:54:33"}
|
| 206 |
+
{"current_steps": 1030, "total_steps": 1778, "loss": 0.1766, "lr": 1.8000580184559315e-05, "epoch": 4.05511811023622, "percentage": 57.93, "elapsed_time": "2:36:38", "remaining_time": "1:53:45"}
|
| 207 |
+
{"current_steps": 1035, "total_steps": 1778, "loss": 0.1711, "lr": 1.7805313778179095e-05, "epoch": 4.074803149606299, "percentage": 58.21, "elapsed_time": "2:37:22", "remaining_time": "1:52:58"}
|
| 208 |
+
{"current_steps": 1040, "total_steps": 1778, "loss": 0.1773, "lr": 1.7610258900224843e-05, "epoch": 4.094488188976378, "percentage": 58.49, "elapsed_time": "2:38:04", "remaining_time": "1:52:10"}
|
| 209 |
+
{"current_steps": 1045, "total_steps": 1778, "loss": 0.1813, "lr": 1.741543435049165e-05, "epoch": 4.1141732283464565, "percentage": 58.77, "elapsed_time": "2:38:48", "remaining_time": "1:51:23"}
|
| 210 |
+
{"current_steps": 1050, "total_steps": 1778, "loss": 0.1776, "lr": 1.7220858906575126e-05, "epoch": 4.133858267716535, "percentage": 59.06, "elapsed_time": "2:39:31", "remaining_time": "1:50:36"}
|
| 211 |
+
{"current_steps": 1055, "total_steps": 1778, "loss": 0.1688, "lr": 1.702655132206154e-05, "epoch": 4.153543307086614, "percentage": 59.34, "elapsed_time": "2:40:16", "remaining_time": "1:49:49"}
|
| 212 |
+
{"current_steps": 1060, "total_steps": 1778, "loss": 0.172, "lr": 1.6832530324720303e-05, "epoch": 4.173228346456693, "percentage": 59.62, "elapsed_time": "2:41:00", "remaining_time": "1:49:03"}
|
| 213 |
+
{"current_steps": 1065, "total_steps": 1778, "loss": 0.184, "lr": 1.6638814614698965e-05, "epoch": 4.192913385826771, "percentage": 59.9, "elapsed_time": "2:41:47", "remaining_time": "1:48:19"}
|
| 214 |
+
{"current_steps": 1070, "total_steps": 1778, "loss": 0.1757, "lr": 1.6445422862720845e-05, "epoch": 4.21259842519685, "percentage": 60.18, "elapsed_time": "2:42:35", "remaining_time": "1:47:34"}
|
| 215 |
+
{"current_steps": 1075, "total_steps": 1778, "loss": 0.175, "lr": 1.6252373708285505e-05, "epoch": 4.232283464566929, "percentage": 60.46, "elapsed_time": "2:43:25", "remaining_time": "1:46:52"}
|
| 216 |
+
{"current_steps": 1080, "total_steps": 1778, "loss": 0.1682, "lr": 1.6059685757872274e-05, "epoch": 4.251968503937007, "percentage": 60.74, "elapsed_time": "2:44:08", "remaining_time": "1:46:04"}
|
| 217 |
+
{"current_steps": 1085, "total_steps": 1778, "loss": 0.1686, "lr": 1.5867377583146836e-05, "epoch": 4.271653543307087, "percentage": 61.02, "elapsed_time": "2:44:53", "remaining_time": "1:45:19"}
|
| 218 |
+
{"current_steps": 1090, "total_steps": 1778, "loss": 0.1808, "lr": 1.567546771917135e-05, "epoch": 4.291338582677166, "percentage": 61.3, "elapsed_time": "2:45:34", "remaining_time": "1:44:30"}
|
| 219 |
+
{"current_steps": 1095, "total_steps": 1778, "loss": 0.1771, "lr": 1.548397466261793e-05, "epoch": 4.311023622047244, "percentage": 61.59, "elapsed_time": "2:46:16", "remaining_time": "1:43:42"}
|
| 220 |
+
{"current_steps": 1100, "total_steps": 1778, "loss": 0.1779, "lr": 1.529291686998592e-05, "epoch": 4.330708661417323, "percentage": 61.87, "elapsed_time": "2:46:59", "remaining_time": "1:42:55"}
|
| 221 |
+
{"current_steps": 1105, "total_steps": 1778, "loss": 0.1739, "lr": 1.5102312755823053e-05, "epoch": 4.350393700787402, "percentage": 62.15, "elapsed_time": "2:47:41", "remaining_time": "1:42:08"}
|
| 222 |
+
{"current_steps": 1110, "total_steps": 1778, "loss": 0.175, "lr": 1.4912180690950545e-05, "epoch": 4.3700787401574805, "percentage": 62.43, "elapsed_time": "2:48:28", "remaining_time": "1:41:23"}
|
| 223 |
+
{"current_steps": 1115, "total_steps": 1778, "loss": 0.1726, "lr": 1.4722539000692548e-05, "epoch": 4.389763779527559, "percentage": 62.71, "elapsed_time": "2:49:05", "remaining_time": "1:40:32"}
|
| 224 |
+
{"current_steps": 1120, "total_steps": 1778, "loss": 0.1739, "lr": 1.4533405963109867e-05, "epoch": 4.409448818897638, "percentage": 62.99, "elapsed_time": "2:49:54", "remaining_time": "1:39:48"}
|
| 225 |
+
{"current_steps": 1125, "total_steps": 1778, "loss": 0.1795, "lr": 1.434479980723833e-05, "epoch": 4.429133858267717, "percentage": 63.27, "elapsed_time": "2:50:46", "remaining_time": "1:39:07"}
|
| 226 |
+
{"current_steps": 1130, "total_steps": 1778, "loss": 0.1763, "lr": 1.4156738711331793e-05, "epoch": 4.448818897637795, "percentage": 63.55, "elapsed_time": "2:51:28", "remaining_time": "1:38:20"}
|
| 227 |
+
{"current_steps": 1135, "total_steps": 1778, "loss": 0.1706, "lr": 1.3969240801110088e-05, "epoch": 4.468503937007874, "percentage": 63.84, "elapsed_time": "2:52:13", "remaining_time": "1:37:34"}
|
| 228 |
+
{"current_steps": 1140, "total_steps": 1778, "loss": 0.1723, "lr": 1.3782324148012061e-05, "epoch": 4.488188976377953, "percentage": 64.12, "elapsed_time": "2:52:51", "remaining_time": "1:36:44"}
|
| 229 |
+
{"current_steps": 1145, "total_steps": 1778, "loss": 0.1704, "lr": 1.3596006767453766e-05, "epoch": 4.5078740157480315, "percentage": 64.4, "elapsed_time": "2:53:32", "remaining_time": "1:35:56"}
|
| 230 |
+
{"current_steps": 1150, "total_steps": 1778, "loss": 0.1821, "lr": 1.3410306617092134e-05, "epoch": 4.52755905511811, "percentage": 64.68, "elapsed_time": "2:54:17", "remaining_time": "1:35:10"}
|
| 231 |
+
{"current_steps": 1155, "total_steps": 1778, "loss": 0.1767, "lr": 1.3225241595094173e-05, "epoch": 4.547244094488189, "percentage": 64.96, "elapsed_time": "2:55:01", "remaining_time": "1:34:24"}
|
| 232 |
+
{"current_steps": 1160, "total_steps": 1778, "loss": 0.1748, "lr": 1.3040829538411876e-05, "epoch": 4.566929133858268, "percentage": 65.24, "elapsed_time": "2:55:47", "remaining_time": "1:33:39"}
|
| 233 |
+
{"current_steps": 1165, "total_steps": 1778, "loss": 0.1785, "lr": 1.2857088221063099e-05, "epoch": 4.586614173228346, "percentage": 65.52, "elapsed_time": "2:56:36", "remaining_time": "1:32:55"}
|
| 234 |
+
{"current_steps": 1170, "total_steps": 1778, "loss": 0.1782, "lr": 1.2674035352418425e-05, "epoch": 4.606299212598425, "percentage": 65.8, "elapsed_time": "2:57:23", "remaining_time": "1:32:10"}
|
| 235 |
+
{"current_steps": 1175, "total_steps": 1778, "loss": 0.1658, "lr": 1.2491688575494337e-05, "epoch": 4.625984251968504, "percentage": 66.09, "elapsed_time": "2:58:04", "remaining_time": "1:31:22"}
|
| 236 |
+
{"current_steps": 1180, "total_steps": 1778, "loss": 0.1641, "lr": 1.231006546525273e-05, "epoch": 4.645669291338582, "percentage": 66.37, "elapsed_time": "2:58:48", "remaining_time": "1:30:37"}
|
| 237 |
+
{"current_steps": 1185, "total_steps": 1778, "loss": 0.1657, "lr": 1.2129183526906971e-05, "epoch": 4.665354330708661, "percentage": 66.65, "elapsed_time": "2:59:27", "remaining_time": "1:29:48"}
|
| 238 |
+
{"current_steps": 1190, "total_steps": 1778, "loss": 0.1761, "lr": 1.1949060194234775e-05, "epoch": 4.68503937007874, "percentage": 66.93, "elapsed_time": "3:00:23", "remaining_time": "1:29:07"}
|
| 239 |
+
{"current_steps": 1195, "total_steps": 1778, "loss": 0.1807, "lr": 1.1769712827897825e-05, "epoch": 4.7047244094488185, "percentage": 67.21, "elapsed_time": "3:01:09", "remaining_time": "1:28:22"}
|
| 240 |
+
{"current_steps": 1200, "total_steps": 1778, "loss": 0.1746, "lr": 1.159115871376858e-05, "epoch": 4.724409448818898, "percentage": 67.49, "elapsed_time": "3:01:53", "remaining_time": "1:27:36"}
|
| 241 |
+
{"current_steps": 1205, "total_steps": 1778, "loss": 0.1763, "lr": 1.1413415061264205e-05, "epoch": 4.744094488188976, "percentage": 67.77, "elapsed_time": "3:02:33", "remaining_time": "1:26:48"}
|
| 242 |
+
{"current_steps": 1210, "total_steps": 1778, "loss": 0.1796, "lr": 1.1236499001687853e-05, "epoch": 4.7637795275590555, "percentage": 68.05, "elapsed_time": "3:03:16", "remaining_time": "1:26:02"}
|
| 243 |
+
{"current_steps": 1215, "total_steps": 1778, "loss": 0.1682, "lr": 1.106042758657758e-05, "epoch": 4.783464566929134, "percentage": 68.34, "elapsed_time": "3:04:02", "remaining_time": "1:25:16"}
|
| 244 |
+
{"current_steps": 1220, "total_steps": 1778, "loss": 0.1822, "lr": 1.0885217786062837e-05, "epoch": 4.803149606299213, "percentage": 68.62, "elapsed_time": "3:04:50", "remaining_time": "1:24:32"}
|
| 245 |
+
{"current_steps": 1225, "total_steps": 1778, "loss": 0.179, "lr": 1.0710886487228868e-05, "epoch": 4.822834645669292, "percentage": 68.9, "elapsed_time": "3:05:36", "remaining_time": "1:23:47"}
|
| 246 |
+
{"current_steps": 1230, "total_steps": 1778, "loss": 0.1729, "lr": 1.053745049248911e-05, "epoch": 4.84251968503937, "percentage": 69.18, "elapsed_time": "3:06:26", "remaining_time": "1:23:03"}
|
| 247 |
+
{"current_steps": 1235, "total_steps": 1778, "loss": 0.1746, "lr": 1.0364926517965693e-05, "epoch": 4.862204724409449, "percentage": 69.46, "elapsed_time": "3:07:09", "remaining_time": "1:22:17"}
|
| 248 |
+
{"current_steps": 1240, "total_steps": 1778, "loss": 0.176, "lr": 1.0193331191878388e-05, "epoch": 4.881889763779528, "percentage": 69.74, "elapsed_time": "3:07:52", "remaining_time": "1:21:30"}
|
| 249 |
+
{"current_steps": 1245, "total_steps": 1778, "loss": 0.1805, "lr": 1.0022681052941856e-05, "epoch": 4.9015748031496065, "percentage": 70.02, "elapsed_time": "3:08:34", "remaining_time": "1:20:44"}
|
| 250 |
+
{"current_steps": 1250, "total_steps": 1778, "loss": 0.1702, "lr": 9.852992548771674e-06, "epoch": 4.921259842519685, "percentage": 70.3, "elapsed_time": "3:09:18", "remaining_time": "1:19:57"}
|
| 251 |
+
{"current_steps": 1255, "total_steps": 1778, "loss": 0.185, "lr": 9.684282034299053e-06, "epoch": 4.940944881889764, "percentage": 70.58, "elapsed_time": "3:10:06", "remaining_time": "1:19:13"}
{"current_steps": 1260, "total_steps": 1778, "loss": 0.1765, "lr": 9.516565770194523e-06, "epoch": 4.960629921259843, "percentage": 70.87, "elapsed_time": "3:10:50", "remaining_time": "1:18:27"}
{"current_steps": 1265, "total_steps": 1778, "loss": 0.1767, "lr": 9.349859921300704e-06, "epoch": 4.980314960629921, "percentage": 71.15, "elapsed_time": "3:11:38", "remaining_time": "1:17:42"}
{"current_steps": 1270, "total_steps": 1778, "loss": 0.1715, "lr": 9.184180555074258e-06, "epoch": 5.0, "percentage": 71.43, "elapsed_time": "3:12:24", "remaining_time": "1:16:57"}
{"current_steps": 1275, "total_steps": 1778, "loss": 0.1633, "lr": 9.019543640037363e-06, "epoch": 5.019685039370079, "percentage": 71.71, "elapsed_time": "3:13:12", "remaining_time": "1:16:13"}
{"current_steps": 1280, "total_steps": 1778, "loss": 0.1683, "lr": 8.855965044238554e-06, "epoch": 5.039370078740157, "percentage": 71.99, "elapsed_time": "3:13:59", "remaining_time": "1:15:28"}
{"current_steps": 1285, "total_steps": 1778, "loss": 0.1614, "lr": 8.693460533723346e-06, "epoch": 5.059055118110236, "percentage": 72.27, "elapsed_time": "3:14:42", "remaining_time": "1:14:42"}
{"current_steps": 1290, "total_steps": 1778, "loss": 0.1663, "lr": 8.532045771014693e-06, "epoch": 5.078740157480315, "percentage": 72.55, "elapsed_time": "3:15:21", "remaining_time": "1:13:54"}
{"current_steps": 1295, "total_steps": 1778, "loss": 0.1637, "lr": 8.37173631360339e-06, "epoch": 5.0984251968503935, "percentage": 72.83, "elapsed_time": "3:16:08", "remaining_time": "1:13:09"}
{"current_steps": 1300, "total_steps": 1778, "loss": 0.1701, "lr": 8.212547612448595e-06, "epoch": 5.118110236220472, "percentage": 73.12, "elapsed_time": "3:16:51", "remaining_time": "1:12:22"}
{"current_steps": 1305, "total_steps": 1778, "loss": 0.1691, "lr": 8.054495010488658e-06, "epoch": 5.137795275590551, "percentage": 73.4, "elapsed_time": "3:17:34", "remaining_time": "1:11:36"}
{"current_steps": 1310, "total_steps": 1778, "loss": 0.1696, "lr": 7.897593741162316e-06, "epoch": 5.15748031496063, "percentage": 73.68, "elapsed_time": "3:18:10", "remaining_time": "1:10:47"}
{"current_steps": 1315, "total_steps": 1778, "loss": 0.1668, "lr": 7.741858926940475e-06, "epoch": 5.177165354330708, "percentage": 73.96, "elapsed_time": "3:18:55", "remaining_time": "1:10:02"}
{"current_steps": 1320, "total_steps": 1778, "loss": 0.1701, "lr": 7.587305577868658e-06, "epoch": 5.196850393700787, "percentage": 74.24, "elapsed_time": "3:19:46", "remaining_time": "1:09:19"}
{"current_steps": 1325, "total_steps": 1778, "loss": 0.1575, "lr": 7.433948590120326e-06, "epoch": 5.216535433070866, "percentage": 74.52, "elapsed_time": "3:20:34", "remaining_time": "1:08:34"}
{"current_steps": 1330, "total_steps": 1778, "loss": 0.164, "lr": 7.281802744561166e-06, "epoch": 5.2362204724409445, "percentage": 74.8, "elapsed_time": "3:21:17", "remaining_time": "1:07:48"}
{"current_steps": 1335, "total_steps": 1778, "loss": 0.1708, "lr": 7.130882705324422e-06, "epoch": 5.255905511811024, "percentage": 75.08, "elapsed_time": "3:21:58", "remaining_time": "1:07:01"}
{"current_steps": 1340, "total_steps": 1778, "loss": 0.1599, "lr": 6.9812030183976e-06, "epoch": 5.275590551181103, "percentage": 75.37, "elapsed_time": "3:22:48", "remaining_time": "1:06:17"}
{"current_steps": 1345, "total_steps": 1778, "loss": 0.167, "lr": 6.832778110220457e-06, "epoch": 5.2952755905511815, "percentage": 75.65, "elapsed_time": "3:23:34", "remaining_time": "1:05:32"}
{"current_steps": 1350, "total_steps": 1778, "loss": 0.1657, "lr": 6.685622286294571e-06, "epoch": 5.31496062992126, "percentage": 75.93, "elapsed_time": "3:24:20", "remaining_time": "1:04:46"}
{"current_steps": 1355, "total_steps": 1778, "loss": 0.166, "lr": 6.539749729804539e-06, "epoch": 5.334645669291339, "percentage": 76.21, "elapsed_time": "3:25:08", "remaining_time": "1:04:02"}
{"current_steps": 1360, "total_steps": 1778, "loss": 0.1674, "lr": 6.395174500250949e-06, "epoch": 5.354330708661418, "percentage": 76.49, "elapsed_time": "3:26:01", "remaining_time": "1:03:19"}
{"current_steps": 1365, "total_steps": 1778, "loss": 0.1705, "lr": 6.251910532095349e-06, "epoch": 5.374015748031496, "percentage": 76.77, "elapsed_time": "3:26:47", "remaining_time": "1:02:34"}
{"current_steps": 1370, "total_steps": 1778, "loss": 0.1759, "lr": 6.109971633417169e-06, "epoch": 5.393700787401575, "percentage": 77.05, "elapsed_time": "3:27:29", "remaining_time": "1:01:47"}
{"current_steps": 1375, "total_steps": 1778, "loss": 0.1639, "lr": 5.969371484582887e-06, "epoch": 5.413385826771654, "percentage": 77.33, "elapsed_time": "3:28:08", "remaining_time": "1:01:00"}
{"current_steps": 1380, "total_steps": 1778, "loss": 0.1662, "lr": 5.830123636927485e-06, "epoch": 5.433070866141732, "percentage": 77.62, "elapsed_time": "3:28:47", "remaining_time": "1:00:12"}
{"current_steps": 1385, "total_steps": 1778, "loss": 0.1639, "lr": 5.692241511448342e-06, "epoch": 5.452755905511811, "percentage": 77.9, "elapsed_time": "3:29:28", "remaining_time": "0:59:26"}
{"current_steps": 1390, "total_steps": 1778, "loss": 0.1623, "lr": 5.555738397511699e-06, "epoch": 5.47244094488189, "percentage": 78.18, "elapsed_time": "3:30:21", "remaining_time": "0:58:42"}
{"current_steps": 1395, "total_steps": 1778, "loss": 0.1674, "lr": 5.4206274515717735e-06, "epoch": 5.4921259842519685, "percentage": 78.46, "elapsed_time": "3:31:05", "remaining_time": "0:57:57"}
{"current_steps": 1400, "total_steps": 1778, "loss": 0.1683, "lr": 5.286921695902749e-06, "epoch": 5.511811023622047, "percentage": 78.74, "elapsed_time": "3:31:46", "remaining_time": "0:57:10"}
{"current_steps": 1405, "total_steps": 1778, "loss": 0.1726, "lr": 5.154634017343662e-06, "epoch": 5.531496062992126, "percentage": 79.02, "elapsed_time": "3:32:36", "remaining_time": "0:56:26"}
{"current_steps": 1410, "total_steps": 1778, "loss": 0.1699, "lr": 5.023777166056294e-06, "epoch": 5.551181102362205, "percentage": 79.3, "elapsed_time": "3:33:16", "remaining_time": "0:55:39"}
{"current_steps": 1415, "total_steps": 1778, "loss": 0.1748, "lr": 4.89436375429633e-06, "epoch": 5.570866141732283, "percentage": 79.58, "elapsed_time": "3:34:04", "remaining_time": "0:54:54"}
{"current_steps": 1420, "total_steps": 1778, "loss": 0.1695, "lr": 4.766406255197751e-06, "epoch": 5.590551181102362, "percentage": 79.87, "elapsed_time": "3:34:56", "remaining_time": "0:54:11"}
{"current_steps": 1425, "total_steps": 1778, "loss": 0.1687, "lr": 4.639917001570644e-06, "epoch": 5.610236220472441, "percentage": 80.15, "elapsed_time": "3:35:38", "remaining_time": "0:53:25"}
{"current_steps": 1430, "total_steps": 1778, "loss": 0.166, "lr": 4.51490818471255e-06, "epoch": 5.6299212598425195, "percentage": 80.43, "elapsed_time": "3:36:24", "remaining_time": "0:52:39"}
{"current_steps": 1435, "total_steps": 1778, "loss": 0.1566, "lr": 4.391391853233404e-06, "epoch": 5.649606299212598, "percentage": 80.71, "elapsed_time": "3:37:13", "remaining_time": "0:51:55"}
{"current_steps": 1440, "total_steps": 1778, "loss": 0.1673, "lr": 4.269379911894336e-06, "epoch": 5.669291338582677, "percentage": 80.99, "elapsed_time": "3:37:54", "remaining_time": "0:51:08"}
{"current_steps": 1445, "total_steps": 1778, "loss": 0.1637, "lr": 4.148884120460186e-06, "epoch": 5.688976377952756, "percentage": 81.27, "elapsed_time": "3:38:43", "remaining_time": "0:50:24"}
{"current_steps": 1450, "total_steps": 1778, "loss": 0.1628, "lr": 4.029916092566131e-06, "epoch": 5.708661417322834, "percentage": 81.55, "elapsed_time": "3:39:36", "remaining_time": "0:49:40"}
{"current_steps": 1455, "total_steps": 1778, "loss": 0.1654, "lr": 3.91248729459831e-06, "epoch": 5.728346456692913, "percentage": 81.83, "elapsed_time": "3:40:20", "remaining_time": "0:48:54"}
{"current_steps": 1460, "total_steps": 1778, "loss": 0.168, "lr": 3.796609044588686e-06, "epoch": 5.748031496062993, "percentage": 82.11, "elapsed_time": "3:41:06", "remaining_time": "0:48:09"}
{"current_steps": 1465, "total_steps": 1778, "loss": 0.1673, "lr": 3.682292511124179e-06, "epoch": 5.76771653543307, "percentage": 82.4, "elapsed_time": "3:41:50", "remaining_time": "0:47:23"}
{"current_steps": 1470, "total_steps": 1778, "loss": 0.1627, "lr": 3.569548712270201e-06, "epoch": 5.78740157480315, "percentage": 82.68, "elapsed_time": "3:42:41", "remaining_time": "0:46:39"}
{"current_steps": 1475, "total_steps": 1778, "loss": 0.1636, "lr": 3.4583885145087613e-06, "epoch": 5.807086614173229, "percentage": 82.96, "elapsed_time": "3:43:18", "remaining_time": "0:45:52"}
{"current_steps": 1480, "total_steps": 1778, "loss": 0.1671, "lr": 3.348822631691082e-06, "epoch": 5.826771653543307, "percentage": 83.24, "elapsed_time": "3:43:59", "remaining_time": "0:45:06"}
{"current_steps": 1485, "total_steps": 1778, "loss": 0.1649, "lr": 3.240861624004983e-06, "epoch": 5.846456692913386, "percentage": 83.52, "elapsed_time": "3:44:48", "remaining_time": "0:44:21"}
{"current_steps": 1490, "total_steps": 1778, "loss": 0.166, "lr": 3.1345158969570933e-06, "epoch": 5.866141732283465, "percentage": 83.8, "elapsed_time": "3:45:31", "remaining_time": "0:43:35"}
{"current_steps": 1495, "total_steps": 1778, "loss": 0.1654, "lr": 3.0297957003699284e-06, "epoch": 5.8858267716535435, "percentage": 84.08, "elapsed_time": "3:46:08", "remaining_time": "0:42:48"}
{"current_steps": 1500, "total_steps": 1778, "loss": 0.1664, "lr": 2.926711127393993e-06, "epoch": 5.905511811023622, "percentage": 84.36, "elapsed_time": "3:46:55", "remaining_time": "0:42:03"}
{"current_steps": 1505, "total_steps": 1778, "loss": 0.1652, "lr": 2.8252721135349892e-06, "epoch": 5.925196850393701, "percentage": 84.65, "elapsed_time": "3:48:19", "remaining_time": "0:41:25"}
{"current_steps": 1510, "total_steps": 1778, "loss": 0.1666, "lr": 2.7254884356961976e-06, "epoch": 5.94488188976378, "percentage": 84.93, "elapsed_time": "3:49:05", "remaining_time": "0:40:39"}
{"current_steps": 1515, "total_steps": 1778, "loss": 0.1671, "lr": 2.6273697112361786e-06, "epoch": 5.964566929133858, "percentage": 85.21, "elapsed_time": "3:49:51", "remaining_time": "0:39:54"}
{"current_steps": 1520, "total_steps": 1778, "loss": 0.175, "lr": 2.5309253970418056e-06, "epoch": 5.984251968503937, "percentage": 85.49, "elapsed_time": "3:50:38", "remaining_time": "0:39:08"}
{"current_steps": 1525, "total_steps": 1778, "loss": 0.1631, "lr": 2.436164788616815e-06, "epoch": 6.003937007874016, "percentage": 85.77, "elapsed_time": "3:51:24", "remaining_time": "0:38:23"}
{"current_steps": 1530, "total_steps": 1778, "loss": 0.1686, "lr": 2.3430970191858873e-06, "epoch": 6.0236220472440944, "percentage": 86.05, "elapsed_time": "3:52:12", "remaining_time": "0:37:38"}
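The lines above are the tail of trainer_log.jsonl: one JSON object per logging step, with current_steps, total_steps, loss, lr, epoch, percentage, and wall-clock timing fields. As a minimal sketch (not part of this repository, and assuming the file has been downloaded to the working directory), the log can be loaded and inspected like this:

import json

# Read trainer_log.jsonl (one JSON object per line) into a list of dicts.
entries = []
with open("trainer_log.jsonl", "r", encoding="utf-8") as f:
    for line in f:
        line = line.strip()
        if line:
            entries.append(json.loads(line))

# Show the loss and learning-rate trend for the last few logged steps.
for e in entries[-5:]:
    print(f"step {e['current_steps']}/{e['total_steps']}  "
          f"loss={e['loss']:.4f}  lr={e['lr']:.3e}  epoch={e['epoch']:.2f}")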
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:347803b23f38dc6a34a9d40b5bad76f7072e95cd500b182ea177c0e4a2d4c37f
size 8657
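training_args.bin is tracked with Git LFS, so the three lines added above are the standard LFS pointer file: the spec version, the SHA-256 of the real binary, and its size in bytes. A small sketch (assuming the actual file has been fetched into the local checkout, e.g. with git lfs pull) to verify a downloaded copy against the pointer:

import hashlib
import os

# Values copied from the LFS pointer shown above.
expected_oid = "347803b23f38dc6a34a9d40b5bad76f7072e95cd500b182ea177c0e4a2d4c37f"
expected_size = 8657

path = "training_args.bin"  # hypothetical local path to the downloaded file
with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

print("size matches:", os.path.getsize(path) == expected_size)
print("sha256 matches:", digest == expected_oid)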
vocab.json
ADDED
The diff for this file is too large to render; see the raw diff.