Add files using upload-large-folder tool
- qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/added_tokens.json +24 -0
- qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/chat_template.jinja +54 -0
- qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/config.json +58 -0
- qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/generation_config.json +7 -0
- qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/merges.txt +0 -0
- qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/model.safetensors.index.json +347 -0
- qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/special_tokens_map.json +31 -0
- qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/tokenizer_config.json +208 -0
- qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/vocab.json +0 -0
- qwen2_5_7b_instruct/limo_filtered_incorrect/trainer_log.jsonl +370 -0
- qwen2_5_math_7b/limo/checkpoint-1230/added_tokens.json +24 -0
- qwen2_5_math_7b/limo/checkpoint-1230/config.json +59 -0
- qwen2_5_math_7b/limo/checkpoint-1230/generation_config.json +6 -0
- qwen2_5_math_7b/limo/checkpoint-1230/model.safetensors.index.json +347 -0
- qwen2_5_math_7b/limo/checkpoint-1230/special_tokens_map.json +31 -0
qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/added_tokens.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "</tool_call>": 151658,
+  "<tool_call>": 151657,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
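These are the 22 extra control tokens Qwen2.5 appends above its 151,643-token base BPE vocabulary (IDs 151643-151664). A quick sanity check that they round-trip through the tokenizer; this sketch assumes `transformers` is installed and the checkpoint directory above has been downloaded locally (the path is repo-relative):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370"
)
# Every entry in added_tokens.json should map back to the same ID.
assert tok.convert_tokens_to_ids("<|im_end|>") == 151645
assert tok.convert_tokens_to_ids("<|endoftext|>") == 151643
assert tok.convert_tokens_to_ids("<tool_call>") == 151657
```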
qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/chat_template.jinja
ADDED
@@ -0,0 +1,54 @@
+{%- if tools %}
+    {{- '<|im_start|>system\n' }}
+    {%- if messages[0]['role'] == 'system' %}
+        {{- messages[0]['content'] }}
+    {%- else %}
+        {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
+    {%- endif %}
+    {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+    {%- for tool in tools %}
+        {{- "\n" }}
+        {{- tool | tojson }}
+    {%- endfor %}
+    {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+{%- else %}
+    {%- if messages[0]['role'] == 'system' %}
+        {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
+    {%- else %}
+        {{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }}
+    {%- endif %}
+{%- endif %}
+{%- for message in messages %}
+    {%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
+        {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
+    {%- elif message.role == "assistant" %}
+        {{- '<|im_start|>' + message.role }}
+        {%- if message.content %}
+            {{- '\n' + message.content }}
+        {%- endif %}
+        {%- for tool_call in message.tool_calls %}
+            {%- if tool_call.function is defined %}
+                {%- set tool_call = tool_call.function %}
+            {%- endif %}
+            {{- '\n<tool_call>\n{"name": "' }}
+            {{- tool_call.name }}
+            {{- '", "arguments": ' }}
+            {{- tool_call.arguments | tojson }}
+            {{- '}\n</tool_call>' }}
+        {%- endfor %}
+        {{- '<|im_end|>\n' }}
+    {%- elif message.role == "tool" %}
+        {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
+            {{- '<|im_start|>user' }}
+        {%- endif %}
+        {{- '\n<tool_response>\n' }}
+        {{- message.content }}
+        {{- '\n</tool_response>' }}
+        {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+            {{- '<|im_end|>\n' }}
+        {%- endif %}
+    {%- endif %}
+{%- endfor %}
+{%- if add_generation_prompt %}
+    {{- '<|im_start|>assistant\n' }}
+{%- endif %}
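The template renders conversations in ChatML form, with optional <tools> injection and <tool_response> grouping. A minimal usage sketch via the standard `transformers` API (repo-relative path, checkpoint assumed local):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370"
)
messages = [{"role": "user", "content": "What is 2 + 2?"}]
prompt = tok.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
# With no system message and no tools, the template falls through to the
# default Qwen system prompt and ends with '<|im_start|>assistant\n'.
print(prompt)
```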
qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/config.json
ADDED
@@ -0,0 +1,58 @@
+{
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 3584,
+  "initializer_range": 0.02,
+  "intermediate_size": 18944,
+  "layer_types": [
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention"
+  ],
+  "max_position_embeddings": 32768,
+  "max_window_layers": 28,
+  "model_type": "qwen2",
+  "num_attention_heads": 28,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 4,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float32",
+  "transformers_version": "4.55.0",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "vocab_size": 152064
+}
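The geometry above (hidden_size 3584, 28 layers, GQA with 4 KV heads, untied embeddings, float32) pins down the checkpoint size. A back-of-the-envelope check, using only the values in this config, reproduces the total_size declared in model.safetensors.index.json below:

```python
# All constants are copied from config.json above.
V, H, I, L = 152064, 3584, 18944, 28       # vocab, hidden, intermediate, layers
N_HEADS, N_KV = 28, 4
head_dim = H // N_HEADS                     # 128
kv_dim = N_KV * head_dim                    # 512
embed = V * H                               # embed_tokens; lm_head is untied, same size
attn = (H * H + H) + 2 * (H * kv_dim + kv_dim) + H * H   # q (w+b), k/v (w+b), o (w)
mlp = 3 * H * I                             # gate_proj, up_proj, down_proj
per_layer = attn + mlp + 2 * H              # plus two RMSNorm weight vectors
total = 2 * embed + L * per_layer + H       # plus the final model.norm
print(total)                                # 7615616512 parameters
print(total * 4)                            # 30462466048 bytes in float32
```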
qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/generation_config.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "transformers_version": "4.55.0",
+  "use_cache": false
+}
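use_cache is saved as false, a training-time setting (commonly disabled when gradient checkpointing is on); inference code normally turns it back on. A self-contained decoding sketch under those assumptions, with the repo-relative path used for illustration:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

ckpt = "qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370"
tok = AutoTokenizer.from_pretrained(ckpt)
model = AutoModelForCausalLM.from_pretrained(ckpt, torch_dtype=torch.bfloat16)
prompt = tok.apply_chat_template(
    [{"role": "user", "content": "What is 17 * 24?"}],
    tokenize=False, add_generation_prompt=True,
)
inputs = tok(prompt, return_tensors="pt")
# Generation stops at eos_token_id 151645 (<|im_end|>) from this config.
out = model.generate(**inputs, max_new_tokens=64, use_cache=True)
print(tok.decode(out[0, inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```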
qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/merges.txt
ADDED
The diff for this file is too large to render. See raw diff.
qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/model.safetensors.index.json
ADDED
@@ -0,0 +1,347 @@
+{
+  "metadata": {
+    "total_parameters": 1903904128,
+    "total_size": 30462466048
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00007-of-00007.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00007.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00007.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
+    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00007.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
+    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00003-of-00007.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
+    "model.layers.10.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.10.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.10.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00003-of-00007.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
+    "model.layers.11.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.11.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.11.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00003-of-00007.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
+    "model.layers.12.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.12.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.12.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00004-of-00007.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
+    "model.layers.13.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.13.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.13.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00004-of-00007.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
+    "model.layers.14.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.14.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.14.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00004-of-00007.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
+    "model.layers.15.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.15.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.15.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00004-of-00007.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
+    "model.layers.16.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.16.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.16.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00004-of-00007.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
+    "model.layers.17.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.17.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.17.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00005-of-00007.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
+    "model.layers.18.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.18.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.18.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00005-of-00007.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
+    "model.layers.19.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.19.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.19.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00007.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
+    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00005-of-00007.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
+    "model.layers.20.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.20.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.20.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00005-of-00007.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
+    "model.layers.21.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.21.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.21.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00005-of-00007.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
+    "model.layers.22.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.22.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.22.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00005-of-00007.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
+    "model.layers.23.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.23.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.23.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00006-of-00007.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
+    "model.layers.24.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.24.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.24.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00006-of-00007.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
+    "model.layers.25.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.25.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.25.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00006-of-00007.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
+    "model.layers.26.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.26.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.26.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00006-of-00007.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
+    "model.layers.27.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.27.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.27.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00002-of-00007.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
+    "model.layers.3.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.3.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.3.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00002-of-00007.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
+    "model.layers.4.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.4.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.4.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00002-of-00007.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
+    "model.layers.5.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.5.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.5.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00002-of-00007.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
+    "model.layers.6.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.6.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.6.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00002-of-00007.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
+    "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00003-of-00007.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
+    "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00003-of-00007.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
+    "model.layers.9.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.9.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
+    "model.layers.9.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
+    "model.norm.weight": "model-00006-of-00007.safetensors"
+  }
+}
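The index maps each tensor to one of seven shards. A standard-library sketch for inspecting the shard layout without loading any weights (path is the repo-relative one from this commit):

```python
import json
from collections import Counter

with open("qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/"
          "model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"], "bytes declared")
for shard, n in sorted(Counter(index["weight_map"].values()).items()):
    print(f"{shard}: {n} tensors")
```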
qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/tokenizer_config.json
ADDED
@@ -0,0 +1,208 @@
+{
+  "add_bos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151645": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151646": {
+      "content": "<|object_ref_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151647": {
+      "content": "<|object_ref_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151648": {
+      "content": "<|box_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151649": {
+      "content": "<|box_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151650": {
+      "content": "<|quad_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151651": {
+      "content": "<|quad_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151652": {
+      "content": "<|vision_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151653": {
+      "content": "<|vision_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151654": {
+      "content": "<|vision_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151655": {
+      "content": "<|image_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151656": {
+      "content": "<|video_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151657": {
+      "content": "<tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151658": {
+      "content": "</tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151659": {
+      "content": "<|fim_prefix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151660": {
+      "content": "<|fim_middle|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151661": {
+      "content": "<|fim_suffix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151662": {
+      "content": "<|fim_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151663": {
+      "content": "<|repo_name|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151664": {
+      "content": "<|file_sep|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "bos_token": null,
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "errors": "replace",
+  "extra_special_tokens": {},
+  "model_max_length": 131072,
+  "pad_token": "<|endoftext|>",
+  "padding_side": "right",
+  "split_special_tokens": false,
+  "tokenizer_class": "Qwen2Tokenizer",
+  "unk_token": null
+}
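Note that pad_token (<|endoftext|>, 151643) is distinct from eos_token (<|im_end|>, 151645), so right-padded training batches never collide with the stop token. A short padding sketch (checkpoint assumed local, path repo-relative):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370"
)
print(tok.pad_token_id, tok.eos_token_id)   # 151643 151645
batch = tok(["2 + 2 = 4", "A longer example to force padding."],
            padding=True, return_tensors="pt")
print(batch["attention_mask"])              # zeros mark the padded positions
```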
qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370/vocab.json
ADDED
The diff for this file is too large to render. See raw diff.
qwen2_5_7b_instruct/limo_filtered_incorrect/trainer_log.jsonl
ADDED
|
@@ -0,0 +1,370 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{"current_steps": 1, "total_steps": 1850, "loss": 0.7808, "lr": 5e-06, "epoch": 0.005405405405405406, "percentage": 0.05, "elapsed_time": "0:00:03", "remaining_time": "1:46:11"}
|
| 2 |
+
{"current_steps": 2, "total_steps": 1850, "loss": 0.8188, "lr": 4.999996395324314e-06, "epoch": 0.010810810810810811, "percentage": 0.11, "elapsed_time": "0:00:06", "remaining_time": "1:38:22"}
|
| 3 |
+
{"current_steps": 3, "total_steps": 1850, "loss": 0.7499, "lr": 4.99998558130765e-06, "epoch": 0.016216216216216217, "percentage": 0.16, "elapsed_time": "0:00:09", "remaining_time": "1:40:24"}
|
| 4 |
+
{"current_steps": 4, "total_steps": 1850, "loss": 0.6276, "lr": 4.999967557981192e-06, "epoch": 0.021621621621621623, "percentage": 0.22, "elapsed_time": "0:00:11", "remaining_time": "1:30:16"}
|
| 5 |
+
{"current_steps": 5, "total_steps": 1850, "loss": 0.7089, "lr": 4.999942325396917e-06, "epoch": 0.02702702702702703, "percentage": 0.27, "elapsed_time": "0:00:14", "remaining_time": "1:28:34"}
|
| 6 |
+
{"current_steps": 6, "total_steps": 1850, "loss": 0.7681, "lr": 4.999909883627588e-06, "epoch": 0.032432432432432434, "percentage": 0.32, "elapsed_time": "0:00:18", "remaining_time": "1:35:36"}
|
| 7 |
+
{"current_steps": 7, "total_steps": 1850, "loss": 1.2311, "lr": 4.999870232766757e-06, "epoch": 0.03783783783783784, "percentage": 0.38, "elapsed_time": "0:00:21", "remaining_time": "1:33:14"}
|
| 8 |
+
{"current_steps": 8, "total_steps": 1850, "loss": 0.7973, "lr": 4.9998233729287696e-06, "epoch": 0.043243243243243246, "percentage": 0.43, "elapsed_time": "0:00:24", "remaining_time": "1:34:08"}
|
| 9 |
+
{"current_steps": 9, "total_steps": 1850, "loss": 0.6119, "lr": 4.999769304248755e-06, "epoch": 0.04864864864864865, "percentage": 0.49, "elapsed_time": "0:00:26", "remaining_time": "1:31:43"}
|
| 10 |
+
{"current_steps": 10, "total_steps": 1850, "loss": 0.7729, "lr": 4.9997080268826344e-06, "epoch": 0.05405405405405406, "percentage": 0.54, "elapsed_time": "0:00:30", "remaining_time": "1:34:46"}
|
| 11 |
+
{"current_steps": 11, "total_steps": 1850, "loss": 0.5507, "lr": 4.9996395410071165e-06, "epoch": 0.05945945945945946, "percentage": 0.59, "elapsed_time": "0:00:33", "remaining_time": "1:34:05"}
|
| 12 |
+
{"current_steps": 12, "total_steps": 1850, "loss": 0.7063, "lr": 4.999563846819696e-06, "epoch": 0.06486486486486487, "percentage": 0.65, "elapsed_time": "0:00:38", "remaining_time": "1:38:59"}
|
| 13 |
+
{"current_steps": 13, "total_steps": 1850, "loss": 0.7735, "lr": 4.999480944538655e-06, "epoch": 0.07027027027027027, "percentage": 0.7, "elapsed_time": "0:00:40", "remaining_time": "1:34:27"}
|
| 14 |
+
{"current_steps": 14, "total_steps": 1850, "loss": 0.5634, "lr": 4.999390834403063e-06, "epoch": 0.07567567567567568, "percentage": 0.76, "elapsed_time": "0:00:45", "remaining_time": "1:38:56"}
|
| 15 |
+
{"current_steps": 15, "total_steps": 1850, "loss": 0.7101, "lr": 4.999293516672773e-06, "epoch": 0.08108108108108109, "percentage": 0.81, "elapsed_time": "0:00:46", "remaining_time": "1:34:22"}
|
| 16 |
+
{"current_steps": 16, "total_steps": 1850, "loss": 0.7069, "lr": 4.9991889916284255e-06, "epoch": 0.08648648648648649, "percentage": 0.86, "elapsed_time": "0:00:47", "remaining_time": "1:30:19"}
|
| 17 |
+
{"current_steps": 17, "total_steps": 1850, "loss": 0.8364, "lr": 4.999077259571442e-06, "epoch": 0.0918918918918919, "percentage": 0.92, "elapsed_time": "0:00:48", "remaining_time": "1:26:46"}
|
| 18 |
+
{"current_steps": 18, "total_steps": 1850, "loss": 0.6239, "lr": 4.998958320824031e-06, "epoch": 0.0972972972972973, "percentage": 0.97, "elapsed_time": "0:00:52", "remaining_time": "1:29:37"}
|
| 19 |
+
{"current_steps": 19, "total_steps": 1850, "loss": 0.6401, "lr": 4.998832175729179e-06, "epoch": 0.10270270270270271, "percentage": 1.03, "elapsed_time": "0:00:56", "remaining_time": "1:29:57"}
|
| 20 |
+
{"current_steps": 20, "total_steps": 1850, "loss": 0.8762, "lr": 4.998698824650656e-06, "epoch": 0.10810810810810811, "percentage": 1.08, "elapsed_time": "0:00:58", "remaining_time": "1:29:54"}
|
| 21 |
+
{"current_steps": 21, "total_steps": 1850, "loss": 0.6356, "lr": 4.998558267973014e-06, "epoch": 0.11351351351351352, "percentage": 1.14, "elapsed_time": "0:01:01", "remaining_time": "1:29:41"}
|
| 22 |
+
{"current_steps": 22, "total_steps": 1850, "loss": 0.5965, "lr": 4.998410506101579e-06, "epoch": 0.11891891891891893, "percentage": 1.19, "elapsed_time": "0:01:03", "remaining_time": "1:28:28"}
|
| 23 |
+
{"current_steps": 23, "total_steps": 1850, "loss": 0.7616, "lr": 4.9982555394624595e-06, "epoch": 0.12432432432432433, "percentage": 1.24, "elapsed_time": "0:01:06", "remaining_time": "1:28:05"}
|
| 24 |
+
{"current_steps": 24, "total_steps": 1850, "loss": 0.7166, "lr": 4.998093368502539e-06, "epoch": 0.12972972972972974, "percentage": 1.3, "elapsed_time": "0:01:08", "remaining_time": "1:26:37"}
|
| 25 |
+
{"current_steps": 25, "total_steps": 1850, "loss": 0.5398, "lr": 4.9979239936894765e-06, "epoch": 0.13513513513513514, "percentage": 1.35, "elapsed_time": "0:01:11", "remaining_time": "1:26:37"}
{"current_steps": 26, "total_steps": 1850, "loss": 0.4563, "lr": 4.997747415511705e-06, "epoch": 0.14054054054054055, "percentage": 1.41, "elapsed_time": "0:01:15", "remaining_time": "1:28:25"}
{"current_steps": 27, "total_steps": 1850, "loss": 0.5254, "lr": 4.997563634478428e-06, "epoch": 0.14594594594594595, "percentage": 1.46, "elapsed_time": "0:01:19", "remaining_time": "1:28:55"}
{"current_steps": 28, "total_steps": 1850, "loss": 0.3985, "lr": 4.997372651119626e-06, "epoch": 0.15135135135135136, "percentage": 1.51, "elapsed_time": "0:01:23", "remaining_time": "1:30:18"}
{"current_steps": 29, "total_steps": 1850, "loss": 0.5538, "lr": 4.997174465986044e-06, "epoch": 0.15675675675675677, "percentage": 1.57, "elapsed_time": "0:01:26", "remaining_time": "1:30:27"}
{"current_steps": 30, "total_steps": 1850, "loss": 0.4613, "lr": 4.996969079649196e-06, "epoch": 0.16216216216216217, "percentage": 1.62, "elapsed_time": "0:01:30", "remaining_time": "1:31:31"}
{"current_steps": 31, "total_steps": 1850, "loss": 0.5902, "lr": 4.996756492701362e-06, "epoch": 0.16756756756756758, "percentage": 1.68, "elapsed_time": "0:01:31", "remaining_time": "1:29:46"}
{"current_steps": 32, "total_steps": 1850, "loss": 0.6301, "lr": 4.996536705755591e-06, "epoch": 0.17297297297297298, "percentage": 1.73, "elapsed_time": "0:01:36", "remaining_time": "1:31:25"}
{"current_steps": 33, "total_steps": 1850, "loss": 0.6533, "lr": 4.996309719445687e-06, "epoch": 0.1783783783783784, "percentage": 1.78, "elapsed_time": "0:01:38", "remaining_time": "1:29:58"}
{"current_steps": 34, "total_steps": 1850, "loss": 0.656, "lr": 4.996075534426223e-06, "epoch": 0.1837837837837838, "percentage": 1.84, "elapsed_time": "0:01:41", "remaining_time": "1:30:21"}
{"current_steps": 35, "total_steps": 1850, "loss": 0.7456, "lr": 4.995834151372526e-06, "epoch": 0.1891891891891892, "percentage": 1.89, "elapsed_time": "0:01:46", "remaining_time": "1:31:55"}
{"current_steps": 36, "total_steps": 1850, "loss": 0.9072, "lr": 4.995585570980685e-06, "epoch": 0.1945945945945946, "percentage": 1.95, "elapsed_time": "0:01:48", "remaining_time": "1:30:54"}
{"current_steps": 37, "total_steps": 1850, "loss": 0.4235, "lr": 4.995329793967537e-06, "epoch": 0.2, "percentage": 2.0, "elapsed_time": "0:01:49", "remaining_time": "1:29:13"}
{"current_steps": 38, "total_steps": 1850, "loss": 0.5929, "lr": 4.9950668210706795e-06, "epoch": 0.20540540540540542, "percentage": 2.05, "elapsed_time": "0:01:50", "remaining_time": "1:27:52"}
{"current_steps": 39, "total_steps": 1850, "loss": 0.582, "lr": 4.994796653048457e-06, "epoch": 0.21081081081081082, "percentage": 2.11, "elapsed_time": "0:01:54", "remaining_time": "1:28:46"}
{"current_steps": 40, "total_steps": 1850, "loss": 0.6648, "lr": 4.994519290679965e-06, "epoch": 0.21621621621621623, "percentage": 2.16, "elapsed_time": "0:01:57", "remaining_time": "1:28:39"}
{"current_steps": 41, "total_steps": 1850, "loss": 0.6, "lr": 4.994234734765043e-06, "epoch": 0.22162162162162163, "percentage": 2.22, "elapsed_time": "0:02:02", "remaining_time": "1:30:24"}
{"current_steps": 42, "total_steps": 1850, "loss": 0.5879, "lr": 4.993942986124278e-06, "epoch": 0.22702702702702704, "percentage": 2.27, "elapsed_time": "0:02:08", "remaining_time": "1:32:17"}
{"current_steps": 43, "total_steps": 1850, "loss": 0.7423, "lr": 4.9936440455989975e-06, "epoch": 0.23243243243243245, "percentage": 2.32, "elapsed_time": "0:02:11", "remaining_time": "1:32:05"}
{"current_steps": 44, "total_steps": 1850, "loss": 0.5415, "lr": 4.993337914051266e-06, "epoch": 0.23783783783783785, "percentage": 2.38, "elapsed_time": "0:02:13", "remaining_time": "1:31:04"}
{"current_steps": 45, "total_steps": 1850, "loss": 0.5937, "lr": 4.99302459236389e-06, "epoch": 0.24324324324324326, "percentage": 2.43, "elapsed_time": "0:02:18", "remaining_time": "1:32:55"}
{"current_steps": 46, "total_steps": 1850, "loss": 0.6086, "lr": 4.992704081440407e-06, "epoch": 0.24864864864864866, "percentage": 2.49, "elapsed_time": "0:02:19", "remaining_time": "1:31:28"}
{"current_steps": 47, "total_steps": 1850, "loss": 0.6323, "lr": 4.992376382205088e-06, "epoch": 0.25405405405405407, "percentage": 2.54, "elapsed_time": "0:02:22", "remaining_time": "1:31:17"}
{"current_steps": 48, "total_steps": 1850, "loss": 0.4514, "lr": 4.992041495602932e-06, "epoch": 0.2594594594594595, "percentage": 2.59, "elapsed_time": "0:02:27", "remaining_time": "1:32:23"}
{"current_steps": 49, "total_steps": 1850, "loss": 0.7453, "lr": 4.991699422599664e-06, "epoch": 0.2648648648648649, "percentage": 2.65, "elapsed_time": "0:02:29", "remaining_time": "1:31:52"}
{"current_steps": 50, "total_steps": 1850, "loss": 0.7038, "lr": 4.991350164181735e-06, "epoch": 0.2702702702702703, "percentage": 2.7, "elapsed_time": "0:02:32", "remaining_time": "1:31:46"}
{"current_steps": 51, "total_steps": 1850, "loss": 0.5296, "lr": 4.990993721356317e-06, "epoch": 0.2756756756756757, "percentage": 2.76, "elapsed_time": "0:02:34", "remaining_time": "1:30:56"}
{"current_steps": 52, "total_steps": 1850, "loss": 0.5257, "lr": 4.990630095151296e-06, "epoch": 0.2810810810810811, "percentage": 2.81, "elapsed_time": "0:02:36", "remaining_time": "1:30:25"}
{"current_steps": 53, "total_steps": 1850, "loss": 0.811, "lr": 4.9902592866152765e-06, "epoch": 0.2864864864864865, "percentage": 2.86, "elapsed_time": "0:02:39", "remaining_time": "1:30:04"}
{"current_steps": 54, "total_steps": 1850, "loss": 0.4431, "lr": 4.989881296817575e-06, "epoch": 0.2918918918918919, "percentage": 2.92, "elapsed_time": "0:02:41", "remaining_time": "1:29:24"}
{"current_steps": 55, "total_steps": 1850, "loss": 0.3317, "lr": 4.989496126848215e-06, "epoch": 0.2972972972972973, "percentage": 2.97, "elapsed_time": "0:02:45", "remaining_time": "1:30:14"}
{"current_steps": 56, "total_steps": 1850, "loss": 0.8249, "lr": 4.989103777817928e-06, "epoch": 0.3027027027027027, "percentage": 3.03, "elapsed_time": "0:02:51", "remaining_time": "1:31:33"}
{"current_steps": 57, "total_steps": 1850, "loss": 0.5625, "lr": 4.988704250858145e-06, "epoch": 0.3081081081081081, "percentage": 3.08, "elapsed_time": "0:02:53", "remaining_time": "1:31:13"}
{"current_steps": 58, "total_steps": 1850, "loss": 0.397, "lr": 4.988297547121e-06, "epoch": 0.31351351351351353, "percentage": 3.14, "elapsed_time": "0:02:58", "remaining_time": "1:31:53"}
{"current_steps": 59, "total_steps": 1850, "loss": 0.6648, "lr": 4.98788366777932e-06, "epoch": 0.31891891891891894, "percentage": 3.19, "elapsed_time": "0:03:00", "remaining_time": "1:31:07"}
{"current_steps": 60, "total_steps": 1850, "loss": 0.5787, "lr": 4.987462614026625e-06, "epoch": 0.32432432432432434, "percentage": 3.24, "elapsed_time": "0:03:02", "remaining_time": "1:30:42"}
{"current_steps": 61, "total_steps": 1850, "loss": 0.6546, "lr": 4.987034387077126e-06, "epoch": 0.32972972972972975, "percentage": 3.3, "elapsed_time": "0:03:05", "remaining_time": "1:30:40"}
{"current_steps": 62, "total_steps": 1850, "loss": 0.5057, "lr": 4.986598988165718e-06, "epoch": 0.33513513513513515, "percentage": 3.35, "elapsed_time": "0:03:08", "remaining_time": "1:30:25"}
{"current_steps": 63, "total_steps": 1850, "loss": 0.6438, "lr": 4.9861564185479785e-06, "epoch": 0.34054054054054056, "percentage": 3.41, "elapsed_time": "0:03:12", "remaining_time": "1:30:59"}
{"current_steps": 64, "total_steps": 1850, "loss": 0.8114, "lr": 4.985706679500163e-06, "epoch": 0.34594594594594597, "percentage": 3.46, "elapsed_time": "0:03:15", "remaining_time": "1:30:52"}
{"current_steps": 65, "total_steps": 1850, "loss": 0.6345, "lr": 4.9852497723192025e-06, "epoch": 0.35135135135135137, "percentage": 3.51, "elapsed_time": "0:03:16", "remaining_time": "1:30:04"}
{"current_steps": 66, "total_steps": 1850, "loss": 0.7756, "lr": 4.9847856983227e-06, "epoch": 0.3567567567567568, "percentage": 3.57, "elapsed_time": "0:03:18", "remaining_time": "1:29:32"}
{"current_steps": 67, "total_steps": 1850, "loss": 0.7248, "lr": 4.984314458848923e-06, "epoch": 0.3621621621621622, "percentage": 3.62, "elapsed_time": "0:03:20", "remaining_time": "1:28:52"}
{"current_steps": 68, "total_steps": 1850, "loss": 0.7902, "lr": 4.983836055256804e-06, "epoch": 0.3675675675675676, "percentage": 3.68, "elapsed_time": "0:03:23", "remaining_time": "1:28:58"}
{"current_steps": 69, "total_steps": 1850, "loss": 0.6512, "lr": 4.983350488925935e-06, "epoch": 0.372972972972973, "percentage": 3.73, "elapsed_time": "0:03:26", "remaining_time": "1:28:38"}
{"current_steps": 70, "total_steps": 1850, "loss": 0.9753, "lr": 4.982857761256564e-06, "epoch": 0.3783783783783784, "percentage": 3.78, "elapsed_time": "0:03:29", "remaining_time": "1:28:50"}
{"current_steps": 71, "total_steps": 1850, "loss": 0.5645, "lr": 4.982357873669589e-06, "epoch": 0.3837837837837838, "percentage": 3.84, "elapsed_time": "0:03:30", "remaining_time": "1:28:06"}
{"current_steps": 72, "total_steps": 1850, "loss": 0.4865, "lr": 4.981850827606556e-06, "epoch": 0.3891891891891892, "percentage": 3.89, "elapsed_time": "0:03:32", "remaining_time": "1:27:39"}
{"current_steps": 73, "total_steps": 1850, "loss": 0.7247, "lr": 4.981336624529655e-06, "epoch": 0.3945945945945946, "percentage": 3.95, "elapsed_time": "0:03:36", "remaining_time": "1:27:48"}
{"current_steps": 74, "total_steps": 1850, "loss": 0.8284, "lr": 4.980815265921714e-06, "epoch": 0.4, "percentage": 4.0, "elapsed_time": "0:03:39", "remaining_time": "1:27:48"}
{"current_steps": 75, "total_steps": 1850, "loss": 0.706, "lr": 4.980286753286196e-06, "epoch": 0.40540540540540543, "percentage": 4.05, "elapsed_time": "0:03:44", "remaining_time": "1:28:29"}
{"current_steps": 76, "total_steps": 1850, "loss": 0.8405, "lr": 4.979751088147192e-06, "epoch": 0.41081081081081083, "percentage": 4.11, "elapsed_time": "0:03:47", "remaining_time": "1:28:22"}
{"current_steps": 77, "total_steps": 1850, "loss": 0.8383, "lr": 4.979208272049425e-06, "epoch": 0.41621621621621624, "percentage": 4.16, "elapsed_time": "0:03:49", "remaining_time": "1:27:54"}
{"current_steps": 78, "total_steps": 1850, "loss": 1.0418, "lr": 4.978658306558235e-06, "epoch": 0.42162162162162165, "percentage": 4.22, "elapsed_time": "0:03:52", "remaining_time": "1:27:53"}
{"current_steps": 79, "total_steps": 1850, "loss": 0.654, "lr": 4.978101193259578e-06, "epoch": 0.42702702702702705, "percentage": 4.27, "elapsed_time": "0:03:53", "remaining_time": "1:27:18"}
{"current_steps": 80, "total_steps": 1850, "loss": 0.4631, "lr": 4.977536933760025e-06, "epoch": 0.43243243243243246, "percentage": 4.32, "elapsed_time": "0:03:55", "remaining_time": "1:26:57"}
{"current_steps": 81, "total_steps": 1850, "loss": 0.5515, "lr": 4.976965529686755e-06, "epoch": 0.43783783783783786, "percentage": 4.38, "elapsed_time": "0:03:58", "remaining_time": "1:26:52"}
{"current_steps": 82, "total_steps": 1850, "loss": 0.6995, "lr": 4.976386982687548e-06, "epoch": 0.44324324324324327, "percentage": 4.43, "elapsed_time": "0:04:00", "remaining_time": "1:26:22"}
{"current_steps": 83, "total_steps": 1850, "loss": 0.6191, "lr": 4.9758012944307845e-06, "epoch": 0.4486486486486487, "percentage": 4.49, "elapsed_time": "0:04:03", "remaining_time": "1:26:17"}
{"current_steps": 84, "total_steps": 1850, "loss": 1.0386, "lr": 4.975208466605436e-06, "epoch": 0.4540540540540541, "percentage": 4.54, "elapsed_time": "0:04:05", "remaining_time": "1:26:01"}
{"current_steps": 85, "total_steps": 1850, "loss": 0.6995, "lr": 4.974608500921064e-06, "epoch": 0.4594594594594595, "percentage": 4.59, "elapsed_time": "0:04:07", "remaining_time": "1:25:43"}
{"current_steps": 86, "total_steps": 1850, "loss": 0.5279, "lr": 4.974001399107816e-06, "epoch": 0.4648648648648649, "percentage": 4.65, "elapsed_time": "0:04:09", "remaining_time": "1:25:07"}
{"current_steps": 87, "total_steps": 1850, "loss": 0.5979, "lr": 4.973387162916415e-06, "epoch": 0.4702702702702703, "percentage": 4.7, "elapsed_time": "0:04:12", "remaining_time": "1:25:22"}
{"current_steps": 88, "total_steps": 1850, "loss": 0.4009, "lr": 4.972765794118158e-06, "epoch": 0.4756756756756757, "percentage": 4.76, "elapsed_time": "0:04:13", "remaining_time": "1:24:38"}
{"current_steps": 89, "total_steps": 1850, "loss": 0.6512, "lr": 4.9721372945049114e-06, "epoch": 0.4810810810810811, "percentage": 4.81, "elapsed_time": "0:04:17", "remaining_time": "1:24:57"}
{"current_steps": 90, "total_steps": 1850, "loss": 0.4848, "lr": 4.971501665889107e-06, "epoch": 0.4864864864864865, "percentage": 4.86, "elapsed_time": "0:04:21", "remaining_time": "1:25:08"}
{"current_steps": 91, "total_steps": 1850, "loss": 0.3983, "lr": 4.9708589101037306e-06, "epoch": 0.4918918918918919, "percentage": 4.92, "elapsed_time": "0:04:24", "remaining_time": "1:25:08"}
{"current_steps": 92, "total_steps": 1850, "loss": 0.5519, "lr": 4.970209029002325e-06, "epoch": 0.4972972972972973, "percentage": 4.97, "elapsed_time": "0:04:29", "remaining_time": "1:25:56"}
{"current_steps": 93, "total_steps": 1850, "loss": 0.7579, "lr": 4.969552024458977e-06, "epoch": 0.5027027027027027, "percentage": 5.03, "elapsed_time": "0:04:33", "remaining_time": "1:26:09"}
{"current_steps": 94, "total_steps": 1850, "loss": 0.5625, "lr": 4.968887898368318e-06, "epoch": 0.5081081081081081, "percentage": 5.08, "elapsed_time": "0:04:38", "remaining_time": "1:26:40"}
{"current_steps": 95, "total_steps": 1850, "loss": 0.5626, "lr": 4.968216652645515e-06, "epoch": 0.5135135135135135, "percentage": 5.14, "elapsed_time": "0:04:43", "remaining_time": "1:27:23"}
{"current_steps": 96, "total_steps": 1850, "loss": 0.7021, "lr": 4.967538289226268e-06, "epoch": 0.518918918918919, "percentage": 5.19, "elapsed_time": "0:04:47", "remaining_time": "1:27:32"}
{"current_steps": 97, "total_steps": 1850, "loss": 0.5081, "lr": 4.966852810066798e-06, "epoch": 0.5243243243243243, "percentage": 5.24, "elapsed_time": "0:04:51", "remaining_time": "1:27:44"}
{"current_steps": 98, "total_steps": 1850, "loss": 0.5366, "lr": 4.9661602171438524e-06, "epoch": 0.5297297297297298, "percentage": 5.3, "elapsed_time": "0:04:52", "remaining_time": "1:27:07"}
{"current_steps": 99, "total_steps": 1850, "loss": 0.6338, "lr": 4.965460512454687e-06, "epoch": 0.5351351351351351, "percentage": 5.35, "elapsed_time": "0:04:54", "remaining_time": "1:26:46"}
{"current_steps": 100, "total_steps": 1850, "loss": 0.6454, "lr": 4.964753698017071e-06, "epoch": 0.5405405405405406, "percentage": 5.41, "elapsed_time": "0:04:58", "remaining_time": "1:26:55"}
{"current_steps": 101, "total_steps": 1850, "loss": 0.493, "lr": 4.964039775869271e-06, "epoch": 0.5459459459459459, "percentage": 5.46, "elapsed_time": "0:04:59", "remaining_time": "1:26:23"}
{"current_steps": 102, "total_steps": 1850, "loss": 0.5626, "lr": 4.963318748070056e-06, "epoch": 0.5513513513513514, "percentage": 5.51, "elapsed_time": "0:05:02", "remaining_time": "1:26:24"}
{"current_steps": 103, "total_steps": 1850, "loss": 0.6229, "lr": 4.9625906166986815e-06, "epoch": 0.5567567567567567, "percentage": 5.57, "elapsed_time": "0:05:08", "remaining_time": "1:27:13"}
{"current_steps": 104, "total_steps": 1850, "loss": 0.5698, "lr": 4.961855383854889e-06, "epoch": 0.5621621621621622, "percentage": 5.62, "elapsed_time": "0:05:11", "remaining_time": "1:27:07"}
{"current_steps": 105, "total_steps": 1850, "loss": 0.4012, "lr": 4.961113051658901e-06, "epoch": 0.5675675675675675, "percentage": 5.68, "elapsed_time": "0:05:13", "remaining_time": "1:26:43"}
{"current_steps": 106, "total_steps": 1850, "loss": 0.5461, "lr": 4.96036362225141e-06, "epoch": 0.572972972972973, "percentage": 5.73, "elapsed_time": "0:05:15", "remaining_time": "1:26:33"}
{"current_steps": 107, "total_steps": 1850, "loss": 0.4895, "lr": 4.959607097793575e-06, "epoch": 0.5783783783783784, "percentage": 5.78, "elapsed_time": "0:05:17", "remaining_time": "1:26:20"}
{"current_steps": 108, "total_steps": 1850, "loss": 0.6228, "lr": 4.9588434804670176e-06, "epoch": 0.5837837837837838, "percentage": 5.84, "elapsed_time": "0:05:21", "remaining_time": "1:26:22"}
{"current_steps": 109, "total_steps": 1850, "loss": 0.7, "lr": 4.958072772473812e-06, "epoch": 0.5891891891891892, "percentage": 5.89, "elapsed_time": "0:05:24", "remaining_time": "1:26:28"}
{"current_steps": 110, "total_steps": 1850, "loss": 0.4098, "lr": 4.9572949760364795e-06, "epoch": 0.5945945945945946, "percentage": 5.95, "elapsed_time": "0:05:25", "remaining_time": "1:25:53"}
{"current_steps": 111, "total_steps": 1850, "loss": 0.4726, "lr": 4.9565100933979835e-06, "epoch": 0.6, "percentage": 6.0, "elapsed_time": "0:05:27", "remaining_time": "1:25:34"}
{"current_steps": 112, "total_steps": 1850, "loss": 0.8092, "lr": 4.9557181268217225e-06, "epoch": 0.6054054054054054, "percentage": 6.05, "elapsed_time": "0:05:29", "remaining_time": "1:25:17"}
{"current_steps": 113, "total_steps": 1850, "loss": 0.6809, "lr": 4.954919078591521e-06, "epoch": 0.6108108108108108, "percentage": 6.11, "elapsed_time": "0:05:31", "remaining_time": "1:25:01"}
{"current_steps": 114, "total_steps": 1850, "loss": 0.5289, "lr": 4.954112951011628e-06, "epoch": 0.6162162162162163, "percentage": 6.16, "elapsed_time": "0:05:35", "remaining_time": "1:25:08"}
{"current_steps": 115, "total_steps": 1850, "loss": 0.7136, "lr": 4.9532997464067065e-06, "epoch": 0.6216216216216216, "percentage": 6.22, "elapsed_time": "0:05:37", "remaining_time": "1:24:55"}
{"current_steps": 116, "total_steps": 1850, "loss": 0.8171, "lr": 4.952479467121828e-06, "epoch": 0.6270270270270271, "percentage": 6.27, "elapsed_time": "0:05:39", "remaining_time": "1:24:34"}
{"current_steps": 117, "total_steps": 1850, "loss": 0.9386, "lr": 4.951652115522463e-06, "epoch": 0.6324324324324324, "percentage": 6.32, "elapsed_time": "0:05:41", "remaining_time": "1:24:12"}
{"current_steps": 118, "total_steps": 1850, "loss": 0.5044, "lr": 4.950817693994481e-06, "epoch": 0.6378378378378379, "percentage": 6.38, "elapsed_time": "0:05:44", "remaining_time": "1:24:18"}
{"current_steps": 119, "total_steps": 1850, "loss": 0.5203, "lr": 4.949976204944135e-06, "epoch": 0.6432432432432432, "percentage": 6.43, "elapsed_time": "0:05:47", "remaining_time": "1:24:18"}
{"current_steps": 120, "total_steps": 1850, "loss": 0.7228, "lr": 4.949127650798063e-06, "epoch": 0.6486486486486487, "percentage": 6.49, "elapsed_time": "0:05:48", "remaining_time": "1:23:47"}
{"current_steps": 121, "total_steps": 1850, "loss": 0.5169, "lr": 4.948272034003275e-06, "epoch": 0.654054054054054, "percentage": 6.54, "elapsed_time": "0:05:49", "remaining_time": "1:23:14"}
{"current_steps": 122, "total_steps": 1850, "loss": 0.3863, "lr": 4.947409357027148e-06, "epoch": 0.6594594594594595, "percentage": 6.59, "elapsed_time": "0:05:51", "remaining_time": "1:22:53"}
{"current_steps": 123, "total_steps": 1850, "loss": 0.7313, "lr": 4.9465396223574165e-06, "epoch": 0.6648648648648648, "percentage": 6.65, "elapsed_time": "0:05:56", "remaining_time": "1:23:21"}
{"current_steps": 124, "total_steps": 1850, "loss": 0.7378, "lr": 4.945662832502172e-06, "epoch": 0.6702702702702703, "percentage": 6.7, "elapsed_time": "0:05:59", "remaining_time": "1:23:18"}
{"current_steps": 125, "total_steps": 1850, "loss": 0.7933, "lr": 4.944778989989847e-06, "epoch": 0.6756756756756757, "percentage": 6.76, "elapsed_time": "0:06:01", "remaining_time": "1:23:05"}
{"current_steps": 126, "total_steps": 1850, "loss": 0.5132, "lr": 4.943888097369216e-06, "epoch": 0.6810810810810811, "percentage": 6.81, "elapsed_time": "0:06:04", "remaining_time": "1:23:05"}
{"current_steps": 127, "total_steps": 1850, "loss": 0.5264, "lr": 4.942990157209381e-06, "epoch": 0.6864864864864865, "percentage": 6.86, "elapsed_time": "0:06:06", "remaining_time": "1:22:49"}
{"current_steps": 128, "total_steps": 1850, "loss": 0.6514, "lr": 4.9420851720997674e-06, "epoch": 0.6918918918918919, "percentage": 6.92, "elapsed_time": "0:06:08", "remaining_time": "1:22:43"}
{"current_steps": 129, "total_steps": 1850, "loss": 1.0914, "lr": 4.94117314465012e-06, "epoch": 0.6972972972972973, "percentage": 6.97, "elapsed_time": "0:06:10", "remaining_time": "1:22:20"}
{"current_steps": 130, "total_steps": 1850, "loss": 0.5183, "lr": 4.940254077490487e-06, "epoch": 0.7027027027027027, "percentage": 7.03, "elapsed_time": "0:06:13", "remaining_time": "1:22:20"}
{"current_steps": 131, "total_steps": 1850, "loss": 0.4467, "lr": 4.939327973271222e-06, "epoch": 0.7081081081081081, "percentage": 7.08, "elapsed_time": "0:06:14", "remaining_time": "1:21:58"}
{"current_steps": 132, "total_steps": 1850, "loss": 0.4674, "lr": 4.9383948346629665e-06, "epoch": 0.7135135135135136, "percentage": 7.14, "elapsed_time": "0:06:16", "remaining_time": "1:21:36"}
{"current_steps": 133, "total_steps": 1850, "loss": 0.5886, "lr": 4.937454664356652e-06, "epoch": 0.7189189189189189, "percentage": 7.19, "elapsed_time": "0:06:17", "remaining_time": "1:21:18"}
{"current_steps": 134, "total_steps": 1850, "loss": 0.506, "lr": 4.9365074650634855e-06, "epoch": 0.7243243243243244, "percentage": 7.24, "elapsed_time": "0:06:20", "remaining_time": "1:21:13"}
{"current_steps": 135, "total_steps": 1850, "loss": 0.783, "lr": 4.9355532395149445e-06, "epoch": 0.7297297297297297, "percentage": 7.3, "elapsed_time": "0:06:24", "remaining_time": "1:21:22"}
{"current_steps": 136, "total_steps": 1850, "loss": 0.5118, "lr": 4.9345919904627655e-06, "epoch": 0.7351351351351352, "percentage": 7.35, "elapsed_time": "0:06:27", "remaining_time": "1:21:29"}
{"current_steps": 137, "total_steps": 1850, "loss": 0.5145, "lr": 4.933623720678944e-06, "epoch": 0.7405405405405405, "percentage": 7.41, "elapsed_time": "0:06:31", "remaining_time": "1:21:34"}
{"current_steps": 138, "total_steps": 1850, "loss": 0.5958, "lr": 4.932648432955718e-06, "epoch": 0.745945945945946, "percentage": 7.46, "elapsed_time": "0:06:34", "remaining_time": "1:21:31"}
{"current_steps": 139, "total_steps": 1850, "loss": 0.4853, "lr": 4.931666130105564e-06, "epoch": 0.7513513513513513, "percentage": 7.51, "elapsed_time": "0:06:38", "remaining_time": "1:21:39"}
{"current_steps": 140, "total_steps": 1850, "loss": 0.58, "lr": 4.930676814961189e-06, "epoch": 0.7567567567567568, "percentage": 7.57, "elapsed_time": "0:06:39", "remaining_time": "1:21:20"}
{"current_steps": 141, "total_steps": 1850, "loss": 0.3721, "lr": 4.92968049037552e-06, "epoch": 0.7621621621621621, "percentage": 7.62, "elapsed_time": "0:06:42", "remaining_time": "1:21:18"}
{"current_steps": 142, "total_steps": 1850, "loss": 0.6031, "lr": 4.9286771592217005e-06, "epoch": 0.7675675675675676, "percentage": 7.68, "elapsed_time": "0:06:44", "remaining_time": "1:21:00"}
{"current_steps": 143, "total_steps": 1850, "loss": 0.667, "lr": 4.927666824393076e-06, "epoch": 0.772972972972973, "percentage": 7.73, "elapsed_time": "0:06:46", "remaining_time": "1:20:57"}
{"current_steps": 144, "total_steps": 1850, "loss": 0.5923, "lr": 4.926649488803191e-06, "epoch": 0.7783783783783784, "percentage": 7.78, "elapsed_time": "0:06:52", "remaining_time": "1:21:28"}
{"current_steps": 145, "total_steps": 1850, "loss": 0.3196, "lr": 4.925625155385776e-06, "epoch": 0.7837837837837838, "percentage": 7.84, "elapsed_time": "0:06:55", "remaining_time": "1:21:25"}
{"current_steps": 146, "total_steps": 1850, "loss": 0.712, "lr": 4.924593827094743e-06, "epoch": 0.7891891891891892, "percentage": 7.89, "elapsed_time": "0:06:57", "remaining_time": "1:21:08"}
{"current_steps": 147, "total_steps": 1850, "loss": 0.4899, "lr": 4.923555506904176e-06, "epoch": 0.7945945945945946, "percentage": 7.95, "elapsed_time": "0:07:01", "remaining_time": "1:21:27"}
{"current_steps": 148, "total_steps": 1850, "loss": 0.9519, "lr": 4.922510197808321e-06, "epoch": 0.8, "percentage": 8.0, "elapsed_time": "0:07:04", "remaining_time": "1:21:16"}
{"current_steps": 149, "total_steps": 1850, "loss": 0.489, "lr": 4.921457902821578e-06, "epoch": 0.8054054054054054, "percentage": 8.05, "elapsed_time": "0:07:09", "remaining_time": "1:21:37"}
{"current_steps": 150, "total_steps": 1850, "loss": 0.974, "lr": 4.920398624978493e-06, "epoch": 0.8108108108108109, "percentage": 8.11, "elapsed_time": "0:07:11", "remaining_time": "1:21:32"}
{"current_steps": 151, "total_steps": 1850, "loss": 0.5107, "lr": 4.919332367333748e-06, "epoch": 0.8162162162162162, "percentage": 8.16, "elapsed_time": "0:07:14", "remaining_time": "1:21:32"}
{"current_steps": 152, "total_steps": 1850, "loss": 0.5132, "lr": 4.918259132962154e-06, "epoch": 0.8216216216216217, "percentage": 8.22, "elapsed_time": "0:07:17", "remaining_time": "1:21:23"}
{"current_steps": 153, "total_steps": 1850, "loss": 0.5708, "lr": 4.917178924958638e-06, "epoch": 0.827027027027027, "percentage": 8.27, "elapsed_time": "0:07:18", "remaining_time": "1:21:03"}
{"current_steps": 154, "total_steps": 1850, "loss": 0.6738, "lr": 4.916091746438243e-06, "epoch": 0.8324324324324325, "percentage": 8.32, "elapsed_time": "0:07:19", "remaining_time": "1:20:45"}
{"current_steps": 155, "total_steps": 1850, "loss": 0.5421, "lr": 4.9149976005361085e-06, "epoch": 0.8378378378378378, "percentage": 8.38, "elapsed_time": "0:07:22", "remaining_time": "1:20:42"}
{"current_steps": 156, "total_steps": 1850, "loss": 0.9456, "lr": 4.913896490407467e-06, "epoch": 0.8432432432432433, "percentage": 8.43, "elapsed_time": "0:07:25", "remaining_time": "1:20:33"}
{"current_steps": 157, "total_steps": 1850, "loss": 0.5696, "lr": 4.912788419227635e-06, "epoch": 0.8486486486486486, "percentage": 8.49, "elapsed_time": "0:07:29", "remaining_time": "1:20:42"}
{"current_steps": 158, "total_steps": 1850, "loss": 0.7361, "lr": 4.911673390192002e-06, "epoch": 0.8540540540540541, "percentage": 8.54, "elapsed_time": "0:07:31", "remaining_time": "1:20:31"}
{"current_steps": 159, "total_steps": 1850, "loss": 0.5894, "lr": 4.910551406516023e-06, "epoch": 0.8594594594594595, "percentage": 8.59, "elapsed_time": "0:07:35", "remaining_time": "1:20:43"}
{"current_steps": 160, "total_steps": 1850, "loss": 0.8437, "lr": 4.909422471435207e-06, "epoch": 0.8648648648648649, "percentage": 8.65, "elapsed_time": "0:07:37", "remaining_time": "1:20:35"}
{"current_steps": 161, "total_steps": 1850, "loss": 0.4127, "lr": 4.90828658820511e-06, "epoch": 0.8702702702702703, "percentage": 8.7, "elapsed_time": "0:07:39", "remaining_time": "1:20:24"}
{"current_steps": 162, "total_steps": 1850, "loss": 0.3873, "lr": 4.907143760101325e-06, "epoch": 0.8756756756756757, "percentage": 8.76, "elapsed_time": "0:07:41", "remaining_time": "1:20:06"}
{"current_steps": 163, "total_steps": 1850, "loss": 0.4827, "lr": 4.905993990419472e-06, "epoch": 0.8810810810810811, "percentage": 8.81, "elapsed_time": "0:07:44", "remaining_time": "1:20:04"}
{"current_steps": 164, "total_steps": 1850, "loss": 0.4883, "lr": 4.904837282475187e-06, "epoch": 0.8864864864864865, "percentage": 8.86, "elapsed_time": "0:07:46", "remaining_time": "1:19:51"}
{"current_steps": 165, "total_steps": 1850, "loss": 0.7961, "lr": 4.9036736396041165e-06, "epoch": 0.8918918918918919, "percentage": 8.92, "elapsed_time": "0:07:49", "remaining_time": "1:19:49"}
{"current_steps": 166, "total_steps": 1850, "loss": 0.5304, "lr": 4.902503065161905e-06, "epoch": 0.8972972972972973, "percentage": 8.97, "elapsed_time": "0:07:51", "remaining_time": "1:19:44"}
{"current_steps": 167, "total_steps": 1850, "loss": 0.7701, "lr": 4.901325562524185e-06, "epoch": 0.9027027027027027, "percentage": 9.03, "elapsed_time": "0:07:55", "remaining_time": "1:19:47"}
{"current_steps": 168, "total_steps": 1850, "loss": 0.4459, "lr": 4.900141135086569e-06, "epoch": 0.9081081081081082, "percentage": 9.08, "elapsed_time": "0:07:58", "remaining_time": "1:19:53"}
{"current_steps": 169, "total_steps": 1850, "loss": 0.5295, "lr": 4.898949786264638e-06, "epoch": 0.9135135135135135, "percentage": 9.14, "elapsed_time": "0:08:01", "remaining_time": "1:19:48"}
{"current_steps": 170, "total_steps": 1850, "loss": 0.5185, "lr": 4.897751519493933e-06, "epoch": 0.918918918918919, "percentage": 9.19, "elapsed_time": "0:08:05", "remaining_time": "1:19:55"}
{"current_steps": 171, "total_steps": 1850, "loss": 0.5937, "lr": 4.896546338229945e-06, "epoch": 0.9243243243243243, "percentage": 9.24, "elapsed_time": "0:08:06", "remaining_time": "1:19:36"}
{"current_steps": 172, "total_steps": 1850, "loss": 0.4753, "lr": 4.8953342459481034e-06, "epoch": 0.9297297297297298, "percentage": 9.3, "elapsed_time": "0:08:09", "remaining_time": "1:19:36"}
{"current_steps": 173, "total_steps": 1850, "loss": 0.636, "lr": 4.894115246143768e-06, "epoch": 0.9351351351351351, "percentage": 9.35, "elapsed_time": "0:08:11", "remaining_time": "1:19:24"}
{"current_steps": 174, "total_steps": 1850, "loss": 0.5151, "lr": 4.892889342332218e-06, "epoch": 0.9405405405405406, "percentage": 9.41, "elapsed_time": "0:08:13", "remaining_time": "1:19:14"}
{"current_steps": 175, "total_steps": 1850, "loss": 0.7373, "lr": 4.891656538048642e-06, "epoch": 0.9459459459459459, "percentage": 9.46, "elapsed_time": "0:08:19", "remaining_time": "1:19:39"}
{"current_steps": 176, "total_steps": 1850, "loss": 0.6444, "lr": 4.890416836848128e-06, "epoch": 0.9513513513513514, "percentage": 9.51, "elapsed_time": "0:08:21", "remaining_time": "1:19:34"}
{"current_steps": 177, "total_steps": 1850, "loss": 0.4956, "lr": 4.889170242305652e-06, "epoch": 0.9567567567567568, "percentage": 9.57, "elapsed_time": "0:08:24", "remaining_time": "1:19:28"}
{"current_steps": 178, "total_steps": 1850, "loss": 0.7239, "lr": 4.887916758016069e-06, "epoch": 0.9621621621621622, "percentage": 9.62, "elapsed_time": "0:08:28", "remaining_time": "1:19:36"}
{"current_steps": 179, "total_steps": 1850, "loss": 0.8918, "lr": 4.886656387594104e-06, "epoch": 0.9675675675675676, "percentage": 9.68, "elapsed_time": "0:08:32", "remaining_time": "1:19:42"}
{"current_steps": 180, "total_steps": 1850, "loss": 0.5549, "lr": 4.885389134674338e-06, "epoch": 0.972972972972973, "percentage": 9.73, "elapsed_time": "0:08:37", "remaining_time": "1:19:57"}
{"current_steps": 181, "total_steps": 1850, "loss": 0.4737, "lr": 4.884115002911197e-06, "epoch": 0.9783783783783784, "percentage": 9.78, "elapsed_time": "0:08:38", "remaining_time": "1:19:40"}
{"current_steps": 182, "total_steps": 1850, "loss": 0.6369, "lr": 4.88283399597895e-06, "epoch": 0.9837837837837838, "percentage": 9.84, "elapsed_time": "0:08:43", "remaining_time": "1:19:59"}
{"current_steps": 183, "total_steps": 1850, "loss": 0.521, "lr": 4.881546117571686e-06, "epoch": 0.9891891891891892, "percentage": 9.89, "elapsed_time": "0:08:45", "remaining_time": "1:19:49"}
{"current_steps": 184, "total_steps": 1850, "loss": 0.4812, "lr": 4.8802513714033135e-06, "epoch": 0.9945945945945946, "percentage": 9.95, "elapsed_time": "0:08:48", "remaining_time": "1:19:43"}
{"current_steps": 185, "total_steps": 1850, "loss": 0.824, "lr": 4.878949761207545e-06, "epoch": 1.0, "percentage": 10.0, "elapsed_time": "0:08:49", "remaining_time": "1:19:23"}
{"current_steps": 186, "total_steps": 1850, "loss": 0.5431, "lr": 4.8776412907378845e-06, "epoch": 1.0054054054054054, "percentage": 10.05, "elapsed_time": "2:11:32", "remaining_time": "19:36:48"}
{"current_steps": 187, "total_steps": 1850, "loss": 0.4882, "lr": 4.876325963767623e-06, "epoch": 1.0108108108108107, "percentage": 10.11, "elapsed_time": "2:11:34", "remaining_time": "19:30:03"}
{"current_steps": 188, "total_steps": 1850, "loss": 0.4796, "lr": 4.875003784089822e-06, "epoch": 1.0162162162162163, "percentage": 10.16, "elapsed_time": "2:11:37", "remaining_time": "19:23:38"}
{"current_steps": 189, "total_steps": 1850, "loss": 0.5344, "lr": 4.873674755517305e-06, "epoch": 1.0216216216216216, "percentage": 10.22, "elapsed_time": "2:11:39", "remaining_time": "19:17:05"}
{"current_steps": 190, "total_steps": 1850, "loss": 0.5975, "lr": 4.872338881882645e-06, "epoch": 1.027027027027027, "percentage": 10.27, "elapsed_time": "2:11:42", "remaining_time": "19:10:43"}
{"current_steps": 191, "total_steps": 1850, "loss": 0.3895, "lr": 4.870996167038154e-06, "epoch": 1.0324324324324325, "percentage": 10.32, "elapsed_time": "2:11:45", "remaining_time": "19:04:30"}
{"current_steps": 192, "total_steps": 1850, "loss": 0.2855, "lr": 4.869646614855877e-06, "epoch": 1.037837837837838, "percentage": 10.38, "elapsed_time": "2:11:47", "remaining_time": "18:58:03"}
{"current_steps": 193, "total_steps": 1850, "loss": 0.5976, "lr": 4.868290229227567e-06, "epoch": 1.0432432432432432, "percentage": 10.43, "elapsed_time": "2:11:53", "remaining_time": "18:52:19"}
{"current_steps": 194, "total_steps": 1850, "loss": 0.2478, "lr": 4.866927014064692e-06, "epoch": 1.0486486486486486, "percentage": 10.49, "elapsed_time": "2:11:55", "remaining_time": "18:46:04"}
{"current_steps": 195, "total_steps": 1850, "loss": 0.7275, "lr": 4.86555697329841e-06, "epoch": 1.054054054054054, "percentage": 10.54, "elapsed_time": "2:11:57", "remaining_time": "18:39:59"}
{"current_steps": 196, "total_steps": 1850, "loss": 0.5008, "lr": 4.864180110879562e-06, "epoch": 1.0594594594594595, "percentage": 10.59, "elapsed_time": "2:12:01", "remaining_time": "18:34:07"}
{"current_steps": 197, "total_steps": 1850, "loss": 0.2901, "lr": 4.862796430778663e-06, "epoch": 1.0648648648648649, "percentage": 10.65, "elapsed_time": "2:12:03", "remaining_time": "18:28:01"}
{"current_steps": 198, "total_steps": 1850, "loss": 0.4771, "lr": 4.861405936985889e-06, "epoch": 1.0702702702702702, "percentage": 10.7, "elapsed_time": "2:12:07", "remaining_time": "18:22:26"}
{"current_steps": 199, "total_steps": 1850, "loss": 0.4908, "lr": 4.860008633511059e-06, "epoch": 1.0756756756756758, "percentage": 10.76, "elapsed_time": "2:12:12", "remaining_time": "18:16:55"}
{"current_steps": 200, "total_steps": 1850, "loss": 0.3358, "lr": 4.8586045243836384e-06, "epoch": 1.0810810810810811, "percentage": 10.81, "elapsed_time": "2:12:16", "remaining_time": "18:11:15"}
{"current_steps": 201, "total_steps": 1850, "loss": 0.452, "lr": 4.857193613652711e-06, "epoch": 1.0864864864864865, "percentage": 10.86, "elapsed_time": "2:12:19", "remaining_time": "18:05:33"}
{"current_steps": 202, "total_steps": 1850, "loss": 0.5481, "lr": 4.8557759053869775e-06, "epoch": 1.0918918918918918, "percentage": 10.92, "elapsed_time": "2:12:20", "remaining_time": "17:59:39"}
{"current_steps": 203, "total_steps": 1850, "loss": 0.3405, "lr": 4.854351403674741e-06, "epoch": 1.0972972972972972, "percentage": 10.97, "elapsed_time": "2:12:22", "remaining_time": "17:53:56"}
{"current_steps": 204, "total_steps": 1850, "loss": 0.4744, "lr": 4.852920112623895e-06, "epoch": 1.1027027027027028, "percentage": 11.03, "elapsed_time": "2:12:23", "remaining_time": "17:48:11"}
{"current_steps": 205, "total_steps": 1850, "loss": 0.5865, "lr": 4.851482036361912e-06, "epoch": 1.1081081081081081, "percentage": 11.08, "elapsed_time": "2:12:25", "remaining_time": "17:42:34"}
{"current_steps": 206, "total_steps": 1850, "loss": 0.4132, "lr": 4.850037179035829e-06, "epoch": 1.1135135135135135, "percentage": 11.14, "elapsed_time": "2:12:27", "remaining_time": "17:37:03"}
{"current_steps": 207, "total_steps": 1850, "loss": 0.4332, "lr": 4.8485855448122425e-06, "epoch": 1.118918918918919, "percentage": 11.19, "elapsed_time": "2:12:29", "remaining_time": "17:31:33"}
{"current_steps": 208, "total_steps": 1850, "loss": 0.2827, "lr": 4.847127137877286e-06, "epoch": 1.1243243243243244, "percentage": 11.24, "elapsed_time": "2:12:31", "remaining_time": "17:26:08"}
{"current_steps": 209, "total_steps": 1850, "loss": 0.6248, "lr": 4.8456619624366285e-06, "epoch": 1.1297297297297297, "percentage": 11.3, "elapsed_time": "2:12:34", "remaining_time": "17:20:54"}
{"current_steps": 210, "total_steps": 1850, "loss": 0.4907, "lr": 4.844190022715456e-06, "epoch": 1.135135135135135, "percentage": 11.35, "elapsed_time": "2:12:37", "remaining_time": "17:15:41"}
{"current_steps": 211, "total_steps": 1850, "loss": 0.2463, "lr": 4.84271132295846e-06, "epoch": 1.1405405405405404, "percentage": 11.41, "elapsed_time": "2:12:41", "remaining_time": "17:10:40"}
{"current_steps": 212, "total_steps": 1850, "loss": 0.4732, "lr": 4.841225867429826e-06, "epoch": 1.145945945945946, "percentage": 11.46, "elapsed_time": "2:12:43", "remaining_time": "17:05:27"}
{"current_steps": 213, "total_steps": 1850, "loss": 0.687, "lr": 4.839733660413224e-06, "epoch": 1.1513513513513514, "percentage": 11.51, "elapsed_time": "2:12:45", "remaining_time": "17:00:17"}
{"current_steps": 214, "total_steps": 1850, "loss": 0.4676, "lr": 4.838234706211792e-06, "epoch": 1.1567567567567567, "percentage": 11.57, "elapsed_time": "2:12:48", "remaining_time": "16:55:16"}
{"current_steps": 215, "total_steps": 1850, "loss": 0.3074, "lr": 4.836729009148124e-06, "epoch": 1.1621621621621623, "percentage": 11.62, "elapsed_time": "2:12:51", "remaining_time": "16:50:20"}
{"current_steps": 216, "total_steps": 1850, "loss": 0.2425, "lr": 4.835216573564261e-06, "epoch": 1.1675675675675676, "percentage": 11.68, "elapsed_time": "2:12:55", "remaining_time": "16:45:30"}
{"current_steps": 217, "total_steps": 1850, "loss": 0.4914, "lr": 4.833697403821672e-06, "epoch": 1.172972972972973, "percentage": 11.73, "elapsed_time": "2:12:58", "remaining_time": "16:40:42"}
{"current_steps": 218, "total_steps": 1850, "loss": 0.4524, "lr": 4.8321715043012516e-06, "epoch": 1.1783783783783783, "percentage": 11.78, "elapsed_time": "2:13:01", "remaining_time": "16:35:52"}
{"current_steps": 219, "total_steps": 1850, "loss": 0.2911, "lr": 4.830638879403296e-06, "epoch": 1.1837837837837837, "percentage": 11.84, "elapsed_time": "2:13:03", "remaining_time": "16:30:53"}
{"current_steps": 220, "total_steps": 1850, "loss": 0.3556, "lr": 4.8290995335475e-06, "epoch": 1.1891891891891893, "percentage": 11.89, "elapsed_time": "2:13:04", "remaining_time": "16:25:56"}
{"current_steps": 221, "total_steps": 1850, "loss": 0.3169, "lr": 4.827553471172935e-06, "epoch": 1.1945945945945946, "percentage": 11.95, "elapsed_time": "2:13:05", "remaining_time": "16:21:03"}
{"current_steps": 222, "total_steps": 1850, "loss": 0.3369, "lr": 4.826000696738045e-06, "epoch": 1.2, "percentage": 12.0, "elapsed_time": "2:13:07", "remaining_time": "16:16:16"}
{"current_steps": 223, "total_steps": 1850, "loss": 0.6498, "lr": 4.824441214720629e-06, "epoch": 1.2054054054054055, "percentage": 12.05, "elapsed_time": "2:13:09", "remaining_time": "16:11:33"}
{"current_steps": 224, "total_steps": 1850, "loss": 0.3416, "lr": 4.8228750296178275e-06, "epoch": 1.2108108108108109, "percentage": 12.11, "elapsed_time": "2:13:12", "remaining_time": "16:06:58"}
{"current_steps": 225, "total_steps": 1850, "loss": 0.348, "lr": 4.821302145946113e-06, "epoch": 1.2162162162162162, "percentage": 12.16, "elapsed_time": "2:13:14", "remaining_time": "16:02:19"}
{"current_steps": 226, "total_steps": 1850, "loss": 0.4552, "lr": 4.819722568241274e-06, "epoch": 1.2216216216216216, "percentage": 12.22, "elapsed_time": "2:13:16", "remaining_time": "15:57:40"}
{"current_steps": 227, "total_steps": 1850, "loss": 0.2841, "lr": 4.818136301058401e-06, "epoch": 1.227027027027027, "percentage": 12.27, "elapsed_time": "2:13:20", "remaining_time": "15:53:18"}
{"current_steps": 228, "total_steps": 1850, "loss": 0.5053, "lr": 4.816543348971879e-06, "epoch": 1.2324324324324325, "percentage": 12.32, "elapsed_time": "2:13:23", "remaining_time": "15:48:54"}
{"current_steps": 229, "total_steps": 1850, "loss": 0.5424, "lr": 4.814943716575368e-06, "epoch": 1.2378378378378379, "percentage": 12.38, "elapsed_time": "2:13:24", "remaining_time": "15:44:19"}
{"current_steps": 230, "total_steps": 1850, "loss": 0.7483, "lr": 4.813337408481793e-06, "epoch": 1.2432432432432432, "percentage": 12.43, "elapsed_time": "2:13:27", "remaining_time": "15:39:59"}
{"current_steps": 231, "total_steps": 1850, "loss": 0.775, "lr": 4.811724429323329e-06, "epoch": 1.2486486486486488, "percentage": 12.49, "elapsed_time": "2:13:29", "remaining_time": "15:35:34"}
{"current_steps": 232, "total_steps": 1850, "loss": 0.4056, "lr": 4.810104783751389e-06, "epoch": 1.2540540540540541, "percentage": 12.54, "elapsed_time": "2:13:30", "remaining_time": "15:31:04"}
{"current_steps": 233, "total_steps": 1850, "loss": 0.3644, "lr": 4.8084784764366125e-06, "epoch": 1.2594594594594595, "percentage": 12.59, "elapsed_time": "2:13:31", "remaining_time": "15:26:37"}
{"current_steps": 234, "total_steps": 1850, "loss": 0.3727, "lr": 4.806845512068846e-06, "epoch": 1.2648648648648648, "percentage": 12.65, "elapsed_time": "2:13:35", "remaining_time": "15:22:32"}
{"current_steps": 235, "total_steps": 1850, "loss": 0.5326, "lr": 4.805205895357137e-06, "epoch": 1.2702702702702702, "percentage": 12.7, "elapsed_time": "2:13:36", "remaining_time": "15:18:11"}
{"current_steps": 236, "total_steps": 1850, "loss": 0.481, "lr": 4.803559631029713e-06, "epoch": 1.2756756756756757, "percentage": 12.76, "elapsed_time": "2:13:38", "remaining_time": "15:14:00"}
{"current_steps": 237, "total_steps": 1850, "loss": 0.3355, "lr": 4.801906723833973e-06, "epoch": 1.281081081081081, "percentage": 12.81, "elapsed_time": "2:13:41", "remaining_time": "15:09:53"}
{"current_steps": 238, "total_steps": 1850, "loss": 0.3704, "lr": 4.8002471785364734e-06, "epoch": 1.2864864864864864, "percentage": 12.86, "elapsed_time": "2:13:43", "remaining_time": "15:05:44"}
{"current_steps": 239, "total_steps": 1850, "loss": 0.511, "lr": 4.798580999922913e-06, "epoch": 1.291891891891892, "percentage": 12.92, "elapsed_time": "2:13:45", "remaining_time": "15:01:36"}
{"current_steps": 240, "total_steps": 1850, "loss": 0.4304, "lr": 4.796908192798117e-06, "epoch": 1.2972972972972974, "percentage": 12.97, "elapsed_time": "2:13:46", "remaining_time": "14:57:23"}
{"current_steps": 241, "total_steps": 1850, "loss": 0.4452, "lr": 4.7952287619860276e-06, "epoch": 1.3027027027027027, "percentage": 13.03, "elapsed_time": "2:13:48", "remaining_time": "14:53:23"}
{"current_steps": 242, "total_steps": 1850, "loss": 0.7567, "lr": 4.793542712329689e-06, "epoch": 1.308108108108108, "percentage": 13.08, "elapsed_time": "2:13:51", "remaining_time": "14:49:28"}
{"current_steps": 243, "total_steps": 1850, "loss": 0.3997, "lr": 4.791850048691228e-06, "epoch": 1.3135135135135134, "percentage": 13.14, "elapsed_time": "2:13:55", "remaining_time": "14:45:40"}
{"current_steps": 244, "total_steps": 1850, "loss": 0.4549, "lr": 4.79015077595185e-06, "epoch": 1.318918918918919, "percentage": 13.19, "elapsed_time": "2:14:01", "remaining_time": "14:42:06"}
{"current_steps": 245, "total_steps": 1850, "loss": 0.3966, "lr": 4.788444899011816e-06, "epoch": 1.3243243243243243, "percentage": 13.24, "elapsed_time": "2:14:03", "remaining_time": "14:38:14"}
{"current_steps": 246, "total_steps": 1850, "loss": 0.2961, "lr": 4.786732422790432e-06, "epoch": 1.3297297297297297, "percentage": 13.3, "elapsed_time": "2:14:06", "remaining_time": "14:34:26"}
{"current_steps": 247, "total_steps": 1850, "loss": 0.4778, "lr": 4.785013352226036e-06, "epoch": 1.3351351351351353, "percentage": 13.35, "elapsed_time": "2:14:07", "remaining_time": "14:30:28"}
{"current_steps": 248, "total_steps": 1850, "loss": 0.2098, "lr": 4.7832876922759805e-06, "epoch": 1.3405405405405406, "percentage": 13.41, "elapsed_time": "2:14:12", "remaining_time": "14:26:55"}
{"current_steps": 249, "total_steps": 1850, "loss": 0.5266, "lr": 4.781555447916622e-06, "epoch": 1.345945945945946, "percentage": 13.46, "elapsed_time": "2:14:17", "remaining_time": "14:23:25"}
{"current_steps": 250, "total_steps": 1850, "loss": 0.3362, "lr": 4.779816624143302e-06, "epoch": 1.3513513513513513, "percentage": 13.51, "elapsed_time": "2:14:18", "remaining_time": "14:19:34"}
{"current_steps": 251, "total_steps": 1850, "loss": 0.6469, "lr": 4.77807122597034e-06, "epoch": 1.3567567567567567, "percentage": 13.57, "elapsed_time": "2:14:22", "remaining_time": "14:16:01"}
{"current_steps": 252, "total_steps": 1850, "loss": 0.3717, "lr": 4.776319258431009e-06, "epoch": 1.3621621621621622, "percentage": 13.62, "elapsed_time": "2:14:26", "remaining_time": "14:12:30"}
{"current_steps": 253, "total_steps": 1850, "loss": 0.3343, "lr": 4.77456072657753e-06, "epoch": 1.3675675675675676, "percentage": 13.68, "elapsed_time": "2:14:28", "remaining_time": "14:08:49"}
{"current_steps": 254, "total_steps": 1850, "loss": 0.4309, "lr": 4.772795635481053e-06, "epoch": 1.372972972972973, "percentage": 13.73, "elapsed_time": "2:14:29", "remaining_time": "14:05:07"}
{"current_steps": 255, "total_steps": 1850, "loss": 0.7661, "lr": 4.77102399023164e-06, "epoch": 1.3783783783783785, "percentage": 13.78, "elapsed_time": "2:14:32", "remaining_time": "14:01:35"}
{"current_steps": 256, "total_steps": 1850, "loss": 0.3661, "lr": 4.769245795938261e-06, "epoch": 1.3837837837837839, "percentage": 13.84, "elapsed_time": "2:14:34", "remaining_time": "13:57:58"}
{"current_steps": 257, "total_steps": 1850, "loss": 0.3434, "lr": 4.767461057728763e-06, "epoch": 1.3891891891891892, "percentage": 13.89, "elapsed_time": "2:14:37", "remaining_time": "13:54:28"}
{"current_steps": 258, "total_steps": 1850, "loss": 0.5388, "lr": 4.76566978074987e-06, "epoch": 1.3945945945945946, "percentage": 13.95, "elapsed_time": "2:14:41", "remaining_time": "13:51:06"}
{"current_steps": 259, "total_steps": 1850, "loss": 0.4927, "lr": 4.7638719701671586e-06, "epoch": 1.4, "percentage": 14.0, "elapsed_time": "2:14:45", "remaining_time": "13:47:46"}
{"current_steps": 260, "total_steps": 1850, "loss": 0.7173, "lr": 4.762067631165049e-06, "epoch": 1.4054054054054055, "percentage": 14.05, "elapsed_time": "2:14:47", "remaining_time": "13:44:16"}
{"current_steps": 261, "total_steps": 1850, "loss": 0.3186, "lr": 4.760256768946787e-06, "epoch": 1.4108108108108108, "percentage": 14.11, "elapsed_time": "2:14:50", "remaining_time": "13:40:53"}
{"current_steps": 262, "total_steps": 1850, "loss": 0.6477, "lr": 4.758439388734429e-06, "epoch": 1.4162162162162162, "percentage": 14.16, "elapsed_time": "2:14:51", "remaining_time": "13:37:23"}
{"current_steps": 263, "total_steps": 1850, "loss": 0.8846, "lr": 4.7566154957688276e-06, "epoch": 1.4216216216216218, "percentage": 14.22, "elapsed_time": "2:14:53", "remaining_time": "13:33:56"}
{"current_steps": 264, "total_steps": 1850, "loss": 0.5446, "lr": 4.754785095309617e-06, "epoch": 1.427027027027027, "percentage": 14.27, "elapsed_time": "2:14:54", "remaining_time": "13:30:30"}
{"current_steps": 265, "total_steps": 1850, "loss": 0.395, "lr": 4.752948192635199e-06, "epoch": 1.4324324324324325, "percentage": 14.32, "elapsed_time": "2:14:56", "remaining_time": "13:27:06"}
{"current_steps": 266, "total_steps": 1850, "loss": 0.713, "lr": 4.751104793042722e-06, "epoch": 1.4378378378378378, "percentage": 14.38, "elapsed_time": "2:15:00", "remaining_time": "13:23:59"}
{"current_steps": 267, "total_steps": 1850, "loss": 0.4474, "lr": 4.7492549018480725e-06, "epoch": 1.4432432432432432, "percentage": 14.43, "elapsed_time": "2:15:03", "remaining_time": "13:20:46"}
{"current_steps": 268, "total_steps": 1850, "loss": 0.7754, "lr": 4.747398524385858e-06, "epoch": 1.4486486486486487, "percentage": 14.49, "elapsed_time": "2:15:06", "remaining_time": "13:17:30"}
{"current_steps": 269, "total_steps": 1850, "loss": 0.4424, "lr": 4.745535666009389e-06, "epoch": 1.454054054054054, "percentage": 14.54, "elapsed_time": "2:15:09", "remaining_time": "13:14:22"}
{"current_steps": 270, "total_steps": 1850, "loss": 0.3137, "lr": 4.743666332090664e-06, "epoch": 1.4594594594594594, "percentage": 14.59, "elapsed_time": "2:15:13", "remaining_time": "13:11:19"}
{"current_steps": 271, "total_steps": 1850, "loss": 0.49, "lr": 4.74179052802036e-06, "epoch": 1.464864864864865, "percentage": 14.65, "elapsed_time": "2:15:15", "remaining_time": "13:08:07"}
{"current_steps": 272, "total_steps": 1850, "loss": 0.6437, "lr": 4.739908259207807e-06, "epoch": 1.4702702702702704, "percentage": 14.7, "elapsed_time": "2:15:17", "remaining_time": "13:04:54"}
{"current_steps": 273, "total_steps": 1850, "loss": 0.4127, "lr": 4.738019531080981e-06, "epoch": 1.4756756756756757, "percentage": 14.76, "elapsed_time": "2:15:20", "remaining_time": "13:01:48"}
{"current_steps": 274, "total_steps": 1850, "loss": 0.4739, "lr": 4.7361243490864825e-06, "epoch": 1.481081081081081, "percentage": 14.81, "elapsed_time": "2:15:25", "remaining_time": "12:58:55"}
{"current_steps": 275, "total_steps": 1850, "loss": 0.5618, "lr": 4.734222718689527e-06, "epoch": 1.4864864864864864, "percentage": 14.86, "elapsed_time": "2:15:30", "remaining_time": "12:56:03"}
{"current_steps": 276, "total_steps": 1850, "loss": 0.405, "lr": 4.732314645373922e-06, "epoch": 1.491891891891892, "percentage": 14.92, "elapsed_time": "2:15:33", "remaining_time": "12:53:04"}
{"current_steps": 277, "total_steps": 1850, "loss": 0.5677, "lr": 4.730400134642055e-06, "epoch": 1.4972972972972973, "percentage": 14.97, "elapsed_time": "2:15:36", "remaining_time": "12:50:06"}
{"current_steps": 278, "total_steps": 1850, "loss": 0.7171, "lr": 4.728479192014879e-06, "epoch": 1.5027027027027027, "percentage": 15.03, "elapsed_time": "2:15:39", "remaining_time": "12:47:09"}
{"current_steps": 279, "total_steps": 1850, "loss": 0.4442, "lr": 4.726551823031895e-06, "epoch": 1.5081081081081082, "percentage": 15.08, "elapsed_time": "2:15:43", "remaining_time": "12:44:16"}
{"current_steps": 280, "total_steps": 1850, "loss": 0.4034, "lr": 4.7246180332511335e-06, "epoch": 1.5135135135135136, "percentage": 15.14, "elapsed_time": "2:15:45", "remaining_time": "12:41:15"}
{"current_steps": 281, "total_steps": 1850, "loss": 0.948, "lr": 4.722677828249142e-06, "epoch": 1.518918918918919, "percentage": 15.19, "elapsed_time": "2:15:49", "remaining_time": "12:38:21"}
{"current_steps": 282, "total_steps": 1850, "loss": 0.7035, "lr": 4.720731213620972e-06, "epoch": 1.5243243243243243, "percentage": 15.24, "elapsed_time": "2:15:52", "remaining_time": "12:35:27"}
{"current_steps": 283, "total_steps": 1850, "loss": 0.4648, "lr": 4.718778194980152e-06, "epoch": 1.5297297297297296, "percentage": 15.3, "elapsed_time": "2:15:54", "remaining_time": "12:32:34"}
{"current_steps": 284, "total_steps": 1850, "loss": 0.5399, "lr": 4.7168187779586805e-06, "epoch": 1.535135135135135, "percentage": 15.35, "elapsed_time": "2:15:57", "remaining_time": "12:29:42"}
{"current_steps": 285, "total_steps": 1850, "loss": 0.5363, "lr": 4.71485296820701e-06, "epoch": 1.5405405405405406, "percentage": 15.41, "elapsed_time": "2:15:59", "remaining_time": "12:26:45"}
{"current_steps": 286, "total_steps": 1850, "loss": 0.3132, "lr": 4.7128807713940245e-06, "epoch": 1.545945945945946, "percentage": 15.46, "elapsed_time": "2:16:04", "remaining_time": "12:24:06"}
{"current_steps": 287, "total_steps": 1850, "loss": 0.3854, "lr": 4.710902193207028e-06, "epoch": 1.5513513513513515, "percentage": 15.51, "elapsed_time": "2:16:09", "remaining_time": "12:21:31"}
{"current_steps": 288, "total_steps": 1850, "loss": 0.3459, "lr": 4.708917239351727e-06, "epoch": 1.5567567567567568, "percentage": 15.57, "elapsed_time": "2:16:12", "remaining_time": "12:18:43"}
{"current_steps": 289, "total_steps": 1850, "loss": 0.7623, "lr": 4.706925915552214e-06, "epoch": 1.5621621621621622, "percentage": 15.62, "elapsed_time": "2:16:14", "remaining_time": "12:15:51"}
{"current_steps": 290, "total_steps": 1850, "loss": 0.5251, "lr": 4.704928227550949e-06, "epoch": 1.5675675675675675, "percentage": 15.68, "elapsed_time": "2:16:17", "remaining_time": "12:13:11"}
{"current_steps": 291, "total_steps": 1850, "loss": 0.3659, "lr": 4.702924181108745e-06, "epoch": 1.572972972972973, "percentage": 15.73, "elapsed_time": "2:16:20", "remaining_time": "12:10:24"}
{"current_steps": 292, "total_steps": 1850, "loss": 0.3186, "lr": 4.700913782004755e-06, "epoch": 1.5783783783783782, "percentage": 15.78, "elapsed_time": "2:16:22", "remaining_time": "12:07:38"}
{"current_steps": 293, "total_steps": 1850, "loss": 0.4038, "lr": 4.698897036036446e-06, "epoch": 1.5837837837837838, "percentage": 15.84, "elapsed_time": "2:16:26", "remaining_time": "12:05:03"}
{"current_steps": 294, "total_steps": 1850, "loss": 0.7577, "lr": 4.696873949019591e-06, "epoch": 1.5891891891891892, "percentage": 15.89, "elapsed_time": "2:16:29", "remaining_time": "12:02:21"}
{"current_steps": 295, "total_steps": 1850, "loss": 0.3112, "lr": 4.694844526788248e-06, "epoch": 1.5945945945945947, "percentage": 15.95, "elapsed_time": "2:16:33", "remaining_time": "11:59:51"}
{"current_steps": 296, "total_steps": 1850, "loss": 0.319, "lr": 4.692808775194745e-06, "epoch": 1.6, "percentage": 16.0, "elapsed_time": "2:16:36", "remaining_time": "11:57:13"}
{"current_steps": 297, "total_steps": 1850, "loss": 0.3089, "lr": 4.690766700109659e-06, "epoch": 1.6054054054054054, "percentage": 16.05, "elapsed_time": "2:16:42", "remaining_time": "11:54:48"}
{"current_steps": 298, "total_steps": 1850, "loss": 0.7288, "lr": 4.688718307421807e-06, "epoch": 1.6108108108108108, "percentage": 16.11, "elapsed_time": "2:16:43", "remaining_time": "11:52:06"}
{"current_steps": 299, "total_steps": 1850, "loss": 0.5617, "lr": 4.686663603038222e-06, "epoch": 1.6162162162162161, "percentage": 16.16, "elapsed_time": "2:16:45", "remaining_time": "11:49:22"}
{"current_steps": 300, "total_steps": 1850, "loss": 0.6138, "lr": 4.6846025928841365e-06, "epoch": 1.6216216216216215, "percentage": 16.22, "elapsed_time": "2:16:49", "remaining_time": "11:46:57"}
{"current_steps": 301, "total_steps": 1850, "loss": 0.3983, "lr": 4.6825352829029705e-06, "epoch": 1.627027027027027, "percentage": 16.27, "elapsed_time": "2:16:53", "remaining_time": "11:44:27"}
{"current_steps": 302, "total_steps": 1850, "loss": 0.384, "lr": 4.68046167905631e-06, "epoch": 1.6324324324324324, "percentage": 16.32, "elapsed_time": "2:16:55", "remaining_time": "11:41:53"}
|
| 303 |
+
{"current_steps": 303, "total_steps": 1850, "loss": 0.6028, "lr": 4.678381787323889e-06, "epoch": 1.637837837837838, "percentage": 16.38, "elapsed_time": "2:17:00", "remaining_time": "11:39:30"}
|
| 304 |
+
{"current_steps": 304, "total_steps": 1850, "loss": 0.5691, "lr": 4.676295613703577e-06, "epoch": 1.6432432432432433, "percentage": 16.43, "elapsed_time": "2:17:03", "remaining_time": "11:37:03"}
|
| 305 |
+
{"current_steps": 305, "total_steps": 1850, "loss": 0.5875, "lr": 4.674203164211357e-06, "epoch": 1.6486486486486487, "percentage": 16.49, "elapsed_time": "2:17:06", "remaining_time": "11:34:30"}
|
| 306 |
+
{"current_steps": 306, "total_steps": 1850, "loss": 0.5387, "lr": 4.67210444488131e-06, "epoch": 1.654054054054054, "percentage": 16.54, "elapsed_time": "2:17:09", "remaining_time": "11:32:03"}
|
| 307 |
+
{"current_steps": 307, "total_steps": 1850, "loss": 0.5641, "lr": 4.669999461765599e-06, "epoch": 1.6594594594594594, "percentage": 16.59, "elapsed_time": "2:17:10", "remaining_time": "11:29:29"}
|
| 308 |
+
{"current_steps": 308, "total_steps": 1850, "loss": 0.6177, "lr": 4.6678882209344474e-06, "epoch": 1.6648648648648647, "percentage": 16.65, "elapsed_time": "2:17:14", "remaining_time": "11:27:06"}
|
| 309 |
+
{"current_steps": 309, "total_steps": 1850, "loss": 0.5682, "lr": 4.665770728476127e-06, "epoch": 1.6702702702702703, "percentage": 16.7, "elapsed_time": "2:17:17", "remaining_time": "11:24:42"}
|
| 310 |
+
{"current_steps": 310, "total_steps": 1850, "loss": 0.4661, "lr": 4.663646990496939e-06, "epoch": 1.6756756756756757, "percentage": 16.76, "elapsed_time": "2:17:23", "remaining_time": "11:22:29"}
|
| 311 |
+
{"current_steps": 311, "total_steps": 1850, "loss": 0.7667, "lr": 4.661517013121189e-06, "epoch": 1.6810810810810812, "percentage": 16.81, "elapsed_time": "2:17:26", "remaining_time": "11:20:06"}
|
| 312 |
+
{"current_steps": 312, "total_steps": 1850, "loss": 0.5567, "lr": 4.659380802491181e-06, "epoch": 1.6864864864864866, "percentage": 16.86, "elapsed_time": "2:17:26", "remaining_time": "11:17:33"}
|
| 313 |
+
{"current_steps": 313, "total_steps": 1850, "loss": 0.2624, "lr": 4.6572383647671915e-06, "epoch": 1.691891891891892, "percentage": 16.92, "elapsed_time": "2:17:29", "remaining_time": "11:15:09"}
|
| 314 |
+
{"current_steps": 314, "total_steps": 1850, "loss": 0.3569, "lr": 4.655089706127457e-06, "epoch": 1.6972972972972973, "percentage": 16.97, "elapsed_time": "2:17:35", "remaining_time": "11:13:02"}
|
| 315 |
+
{"current_steps": 315, "total_steps": 1850, "loss": 0.3578, "lr": 4.652934832768148e-06, "epoch": 1.7027027027027026, "percentage": 17.03, "elapsed_time": "2:17:40", "remaining_time": "11:10:54"}
|
| 316 |
+
{"current_steps": 316, "total_steps": 1850, "loss": 0.4817, "lr": 4.650773750903363e-06, "epoch": 1.708108108108108, "percentage": 17.08, "elapsed_time": "2:17:45", "remaining_time": "11:08:42"}
|
| 317 |
+
{"current_steps": 317, "total_steps": 1850, "loss": 0.4623, "lr": 4.6486064667651005e-06, "epoch": 1.7135135135135136, "percentage": 17.14, "elapsed_time": "2:17:49", "remaining_time": "11:06:30"}
|
| 318 |
+
{"current_steps": 318, "total_steps": 1850, "loss": 0.6034, "lr": 4.646432986603245e-06, "epoch": 1.718918918918919, "percentage": 17.19, "elapsed_time": "2:17:54", "remaining_time": "11:04:21"}
|
| 319 |
+
{"current_steps": 319, "total_steps": 1850, "loss": 0.5898, "lr": 4.644253316685552e-06, "epoch": 1.7243243243243245, "percentage": 17.24, "elapsed_time": "2:17:55", "remaining_time": "11:01:59"}
|
| 320 |
+
{"current_steps": 320, "total_steps": 1850, "loss": 0.4271, "lr": 4.6420674632976205e-06, "epoch": 1.7297297297297298, "percentage": 17.3, "elapsed_time": "2:17:58", "remaining_time": "10:59:43"}
|
| 321 |
+
{"current_steps": 321, "total_steps": 1850, "loss": 0.4306, "lr": 4.639875432742886e-06, "epoch": 1.7351351351351352, "percentage": 17.35, "elapsed_time": "2:17:59", "remaining_time": "10:57:18"}
|
| 322 |
+
{"current_steps": 322, "total_steps": 1850, "loss": 0.574, "lr": 4.6376772313425975e-06, "epoch": 1.7405405405405405, "percentage": 17.41, "elapsed_time": "2:18:00", "remaining_time": "10:54:56"}
|
| 323 |
+
{"current_steps": 323, "total_steps": 1850, "loss": 0.5807, "lr": 4.635472865435795e-06, "epoch": 1.7459459459459459, "percentage": 17.46, "elapsed_time": "2:18:03", "remaining_time": "10:52:42"}
|
| 324 |
+
{"current_steps": 324, "total_steps": 1850, "loss": 0.6298, "lr": 4.6332623413792995e-06, "epoch": 1.7513513513513512, "percentage": 17.51, "elapsed_time": "2:18:06", "remaining_time": "10:50:26"}
|
| 325 |
+
{"current_steps": 325, "total_steps": 1850, "loss": 0.338, "lr": 4.6310456655476874e-06, "epoch": 1.7567567567567568, "percentage": 17.57, "elapsed_time": "2:18:07", "remaining_time": "10:48:09"}
|
| 326 |
+
{"current_steps": 326, "total_steps": 1850, "loss": 0.3887, "lr": 4.6288228443332786e-06, "epoch": 1.7621621621621621, "percentage": 17.62, "elapsed_time": "2:18:09", "remaining_time": "10:45:51"}
|
| 327 |
+
{"current_steps": 327, "total_steps": 1850, "loss": 0.5677, "lr": 4.626593884146111e-06, "epoch": 1.7675675675675677, "percentage": 17.68, "elapsed_time": "2:18:12", "remaining_time": "10:43:43"}
|
| 328 |
+
{"current_steps": 328, "total_steps": 1850, "loss": 0.4496, "lr": 4.624358791413928e-06, "epoch": 1.772972972972973, "percentage": 17.73, "elapsed_time": "2:18:16", "remaining_time": "10:41:37"}
|
| 329 |
+
{"current_steps": 329, "total_steps": 1850, "loss": 0.519, "lr": 4.622117572582159e-06, "epoch": 1.7783783783783784, "percentage": 17.78, "elapsed_time": "2:18:19", "remaining_time": "10:39:30"}
|
| 330 |
+
{"current_steps": 330, "total_steps": 1850, "loss": 0.7875, "lr": 4.619870234113894e-06, "epoch": 1.7837837837837838, "percentage": 17.84, "elapsed_time": "2:18:20", "remaining_time": "10:37:14"}
|
| 331 |
+
{"current_steps": 331, "total_steps": 1850, "loss": 0.5327, "lr": 4.617616782489878e-06, "epoch": 1.7891891891891891, "percentage": 17.89, "elapsed_time": "2:18:23", "remaining_time": "10:35:07"}
|
| 332 |
+
{"current_steps": 332, "total_steps": 1850, "loss": 0.3327, "lr": 4.615357224208477e-06, "epoch": 1.7945945945945945, "percentage": 17.95, "elapsed_time": "2:18:28", "remaining_time": "10:33:10"}
|
| 333 |
+
{"current_steps": 333, "total_steps": 1850, "loss": 0.6386, "lr": 4.613091565785674e-06, "epoch": 1.8, "percentage": 18.0, "elapsed_time": "2:18:34", "remaining_time": "10:31:17"}
|
| 334 |
+
{"current_steps": 334, "total_steps": 1850, "loss": 0.4145, "lr": 4.610819813755038e-06, "epoch": 1.8054054054054054, "percentage": 18.05, "elapsed_time": "2:18:40", "remaining_time": "10:29:24"}
|
| 335 |
+
{"current_steps": 335, "total_steps": 1850, "loss": 0.339, "lr": 4.608541974667714e-06, "epoch": 1.810810810810811, "percentage": 18.11, "elapsed_time": "2:18:44", "remaining_time": "10:27:25"}
|
| 336 |
+
{"current_steps": 336, "total_steps": 1850, "loss": 0.4042, "lr": 4.606258055092397e-06, "epoch": 1.8162162162162163, "percentage": 18.16, "elapsed_time": "2:18:47", "remaining_time": "10:25:25"}
|
| 337 |
+
{"current_steps": 337, "total_steps": 1850, "loss": 0.4566, "lr": 4.603968061615321e-06, "epoch": 1.8216216216216217, "percentage": 18.22, "elapsed_time": "2:18:50", "remaining_time": "10:23:20"}
|
| 338 |
+
{"current_steps": 338, "total_steps": 1850, "loss": 0.7003, "lr": 4.601672000840231e-06, "epoch": 1.827027027027027, "percentage": 18.27, "elapsed_time": "2:18:53", "remaining_time": "10:21:17"}
|
| 339 |
+
{"current_steps": 339, "total_steps": 1850, "loss": 0.2668, "lr": 4.5993698793883715e-06, "epoch": 1.8324324324324324, "percentage": 18.32, "elapsed_time": "2:18:55", "remaining_time": "10:19:12"}
|
| 340 |
+
{"current_steps": 340, "total_steps": 1850, "loss": 0.8249, "lr": 4.597061703898462e-06, "epoch": 1.8378378378378377, "percentage": 18.38, "elapsed_time": "2:18:56", "remaining_time": "10:17:04"}
|
| 341 |
+
{"current_steps": 341, "total_steps": 1850, "loss": 0.3457, "lr": 4.594747481026685e-06, "epoch": 1.8432432432432433, "percentage": 18.43, "elapsed_time": "2:19:00", "remaining_time": "10:15:06"}
|
| 342 |
+
{"current_steps": 342, "total_steps": 1850, "loss": 0.325, "lr": 4.592427217446656e-06, "epoch": 1.8486486486486486, "percentage": 18.49, "elapsed_time": "2:19:01", "remaining_time": "10:13:01"}
|
| 343 |
+
{"current_steps": 343, "total_steps": 1850, "loss": 0.8412, "lr": 4.590100919849413e-06, "epoch": 1.8540540540540542, "percentage": 18.54, "elapsed_time": "2:19:03", "remaining_time": "10:10:57"}
|
| 344 |
+
{"current_steps": 344, "total_steps": 1850, "loss": 0.4915, "lr": 4.587768594943396e-06, "epoch": 1.8594594594594596, "percentage": 18.59, "elapsed_time": "2:19:06", "remaining_time": "10:08:59"}
|
| 345 |
+
{"current_steps": 345, "total_steps": 1850, "loss": 0.3661, "lr": 4.585430249454426e-06, "epoch": 1.864864864864865, "percentage": 18.65, "elapsed_time": "2:19:07", "remaining_time": "10:06:56"}
|
| 346 |
+
{"current_steps": 346, "total_steps": 1850, "loss": 0.5055, "lr": 4.583085890125682e-06, "epoch": 1.8702702702702703, "percentage": 18.7, "elapsed_time": "2:19:10", "remaining_time": "10:04:57"}
|
| 347 |
+
{"current_steps": 347, "total_steps": 1850, "loss": 0.4834, "lr": 4.5807355237176896e-06, "epoch": 1.8756756756756756, "percentage": 18.76, "elapsed_time": "2:19:16", "remaining_time": "10:03:13"}
|
| 348 |
+
{"current_steps": 348, "total_steps": 1850, "loss": 0.3639, "lr": 4.578379157008296e-06, "epoch": 1.881081081081081, "percentage": 18.81, "elapsed_time": "2:19:19", "remaining_time": "10:01:18"}
|
| 349 |
+
{"current_steps": 349, "total_steps": 1850, "loss": 0.3676, "lr": 4.57601679679265e-06, "epoch": 1.8864864864864865, "percentage": 18.86, "elapsed_time": "2:19:22", "remaining_time": "9:59:24"}
|
| 350 |
+
{"current_steps": 350, "total_steps": 1850, "loss": 0.5806, "lr": 4.573648449883188e-06, "epoch": 1.8918918918918919, "percentage": 18.92, "elapsed_time": "2:19:25", "remaining_time": "9:57:32"}
|
| 351 |
+
{"current_steps": 351, "total_steps": 1850, "loss": 0.3274, "lr": 4.571274123109606e-06, "epoch": 1.8972972972972975, "percentage": 18.97, "elapsed_time": "2:19:26", "remaining_time": "9:55:32"}
|
| 352 |
+
{"current_steps": 352, "total_steps": 1850, "loss": 0.5008, "lr": 4.568893823318847e-06, "epoch": 1.9027027027027028, "percentage": 19.03, "elapsed_time": "2:19:32", "remaining_time": "9:53:51"}
|
| 353 |
+
{"current_steps": 353, "total_steps": 1850, "loss": 0.4848, "lr": 4.566507557375077e-06, "epoch": 1.9081081081081082, "percentage": 19.08, "elapsed_time": "2:19:36", "remaining_time": "9:52:03"}
|
| 354 |
+
{"current_steps": 354, "total_steps": 1850, "loss": 0.3779, "lr": 4.5641153321596684e-06, "epoch": 1.9135135135135135, "percentage": 19.14, "elapsed_time": "2:19:38", "remaining_time": "9:50:08"}
|
| 355 |
+
{"current_steps": 355, "total_steps": 1850, "loss": 0.7481, "lr": 4.56171715457118e-06, "epoch": 1.9189189189189189, "percentage": 19.19, "elapsed_time": "2:19:40", "remaining_time": "9:48:13"}
|
| 356 |
+
{"current_steps": 356, "total_steps": 1850, "loss": 0.435, "lr": 4.559313031525331e-06, "epoch": 1.9243243243243242, "percentage": 19.24, "elapsed_time": "2:19:42", "remaining_time": "9:46:19"}
|
| 357 |
+
{"current_steps": 357, "total_steps": 1850, "loss": 0.4895, "lr": 4.55690296995499e-06, "epoch": 1.9297297297297298, "percentage": 19.3, "elapsed_time": "2:19:44", "remaining_time": "9:44:24"}
|
| 358 |
+
{"current_steps": 358, "total_steps": 1850, "loss": 0.8648, "lr": 4.554486976810149e-06, "epoch": 1.9351351351351351, "percentage": 19.35, "elapsed_time": "2:19:45", "remaining_time": "9:42:29"}
|
| 359 |
+
{"current_steps": 359, "total_steps": 1850, "loss": 0.578, "lr": 4.552065059057906e-06, "epoch": 1.9405405405405407, "percentage": 19.41, "elapsed_time": "2:19:47", "remaining_time": "9:40:36"}
|
| 360 |
+
{"current_steps": 360, "total_steps": 1850, "loss": 0.9823, "lr": 4.549637223682441e-06, "epoch": 1.945945945945946, "percentage": 19.46, "elapsed_time": "2:19:49", "remaining_time": "9:38:41"}
|
| 361 |
+
{"current_steps": 361, "total_steps": 1850, "loss": 0.6065, "lr": 4.547203477685005e-06, "epoch": 1.9513513513513514, "percentage": 19.51, "elapsed_time": "2:19:53", "remaining_time": "9:36:59"}
|
| 362 |
+
{"current_steps": 362, "total_steps": 1850, "loss": 0.3985, "lr": 4.544763828083888e-06, "epoch": 1.9567567567567568, "percentage": 19.57, "elapsed_time": "2:19:57", "remaining_time": "9:35:18"}
|
| 363 |
+
{"current_steps": 363, "total_steps": 1850, "loss": 0.6149, "lr": 4.542318281914405e-06, "epoch": 1.962162162162162, "percentage": 19.62, "elapsed_time": "2:20:00", "remaining_time": "9:33:33"}
|
| 364 |
+
{"current_steps": 364, "total_steps": 1850, "loss": 0.5578, "lr": 4.53986684622888e-06, "epoch": 1.9675675675675675, "percentage": 19.68, "elapsed_time": "2:20:04", "remaining_time": "9:31:51"}
|
| 365 |
+
{"current_steps": 365, "total_steps": 1850, "loss": 0.4855, "lr": 4.537409528096615e-06, "epoch": 1.972972972972973, "percentage": 19.73, "elapsed_time": "2:20:06", "remaining_time": "9:30:00"}
|
| 366 |
+
{"current_steps": 366, "total_steps": 1850, "loss": 0.3366, "lr": 4.534946334603879e-06, "epoch": 1.9783783783783784, "percentage": 19.78, "elapsed_time": "2:20:09", "remaining_time": "9:28:16"}
|
| 367 |
+
{"current_steps": 367, "total_steps": 1850, "loss": 0.4031, "lr": 4.532477272853882e-06, "epoch": 1.983783783783784, "percentage": 19.84, "elapsed_time": "2:20:12", "remaining_time": "9:26:35"}
|
| 368 |
+
{"current_steps": 368, "total_steps": 1850, "loss": 0.3215, "lr": 4.530002349966759e-06, "epoch": 1.9891891891891893, "percentage": 19.89, "elapsed_time": "2:20:14", "remaining_time": "9:24:48"}
|
| 369 |
+
{"current_steps": 369, "total_steps": 1850, "loss": 0.4393, "lr": 4.5275215730795445e-06, "epoch": 1.9945945945945946, "percentage": 19.95, "elapsed_time": "2:20:17", "remaining_time": "9:23:03"}
|
| 370 |
+
{"current_steps": 370, "total_steps": 1850, "loss": 0.4536, "lr": 4.525034949346156e-06, "epoch": 2.0, "percentage": 20.0, "elapsed_time": "2:20:18", "remaining_time": "9:21:14"}
|
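Note: trainer_log.jsonl is one JSON object per line, as shown above. A minimal sketch for summarizing training progress from it, assuming only the Python standard library and the path added in this commit:

import json

# Path as added in this commit; adjust if reading from elsewhere.
LOG = "qwen2_5_7b_instruct/limo_filtered_incorrect/trainer_log.jsonl"

with open(LOG) as f:
    records = [json.loads(line) for line in f if line.strip()]

# Coarse progress report every 25 logged steps: step, loss, learning rate, epoch.
for r in records[::25]:
    print(f"step {r['current_steps']:>4}/{r['total_steps']}  "
          f"loss={r['loss']:.3f}  lr={r['lr']:.3e}  epoch={r['epoch']:.2f}")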
qwen2_5_math_7b/limo/checkpoint-1230/added_tokens.json
ADDED
@@ -0,0 +1,24 @@
{
  "</tool_call>": 151658,
  "<tool_call>": 151657,
  "<|box_end|>": 151649,
  "<|box_start|>": 151648,
  "<|endoftext|>": 151643,
  "<|file_sep|>": 151664,
  "<|fim_middle|>": 151660,
  "<|fim_pad|>": 151662,
  "<|fim_prefix|>": 151659,
  "<|fim_suffix|>": 151661,
  "<|im_end|>": 151645,
  "<|im_start|>": 151644,
  "<|image_pad|>": 151655,
  "<|object_ref_end|>": 151647,
  "<|object_ref_start|>": 151646,
  "<|quad_end|>": 151651,
  "<|quad_start|>": 151650,
  "<|repo_name|>": 151663,
  "<|video_pad|>": 151656,
  "<|vision_end|>": 151653,
  "<|vision_pad|>": 151654,
  "<|vision_start|>": 151652
}
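Note: a quick way to verify that these added-token IDs agree with the tokenizer shipped in this commit. A sketch assuming transformers is installed; it points at the checkpoint-370 directory, since that one includes the full tokenizer files (vocab.json, merges.txt, tokenizer_config.json):

import json
from transformers import AutoTokenizer

CKPT = "qwen2_5_7b_instruct/limo_filtered_incorrect/checkpoint-370"

tok = AutoTokenizer.from_pretrained(CKPT)
with open(f"{CKPT}/added_tokens.json") as f:
    added = json.load(f)

# Each special token should resolve to the ID listed in added_tokens.json.
for token, expected_id in added.items():
    assert tok.convert_tokens_to_ids(token) == expected_id, token
print(f"all {len(added)} added tokens resolve to their listed IDs")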
qwen2_5_math_7b/limo/checkpoint-1230/config.json
ADDED
@@ -0,0 +1,59 @@
{
  "architectures": [
    "Qwen2ForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 151643,
  "eos_token_id": 151643,
  "hidden_act": "silu",
  "hidden_size": 3584,
  "initializer_range": 0.02,
  "intermediate_size": 18944,
  "layer_types": [
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention"
  ],
  "max_position_embeddings": 4096,
  "max_window_layers": 28,
  "model_type": "qwen2",
  "num_attention_heads": 28,
  "num_hidden_layers": 28,
  "num_key_value_heads": 4,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 10000,
  "sliding_window": null,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.55.0",
  "use_cache": false,
  "use_mrope": false,
  "use_sliding_window": false,
  "vocab_size": 152064
}
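Note: a small sanity-check sketch for the config above, assuming transformers is installed and the checkpoint directory is local; field names are taken verbatim from the JSON:

from transformers import AutoConfig

# Load the config from this commit's checkpoint directory.
cfg = AutoConfig.from_pretrained("qwen2_5_math_7b/limo/checkpoint-1230")

assert cfg.model_type == "qwen2"
assert cfg.hidden_size == 3584 and cfg.num_hidden_layers == 28
# Grouped-query attention: 28 query heads share 4 KV heads (7 per group).
print("head_dim:", cfg.hidden_size // cfg.num_attention_heads)           # 128
print("kv groups:", cfg.num_attention_heads // cfg.num_key_value_heads)  # 7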
qwen2_5_math_7b/limo/checkpoint-1230/generation_config.json
ADDED
@@ -0,0 +1,6 @@
{
  "bos_token_id": 151643,
  "eos_token_id": 151643,
  "max_new_tokens": 2048,
  "transformers_version": "4.55.0"
}
qwen2_5_math_7b/limo/checkpoint-1230/model.safetensors.index.json
ADDED
@@ -0,0 +1,347 @@
{
  "metadata": {
    "total_parameters": 1903904128,
    "total_size": 30462466048
  },
  "weight_map": {
    "lm_head.weight": "model-00007-of-00007.safetensors",
    "model.embed_tokens.weight": "model-00001-of-00007.safetensors",
    "model.layers.0.input_layernorm.weight": "model-00001-of-00007.safetensors",
    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.1.input_layernorm.weight": "model-00001-of-00007.safetensors",
    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.10.input_layernorm.weight": "model-00003-of-00007.safetensors",
    "model.layers.10.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.10.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
    "model.layers.10.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.10.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.10.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.11.input_layernorm.weight": "model-00003-of-00007.safetensors",
    "model.layers.11.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.11.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
    "model.layers.11.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.11.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.11.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.12.input_layernorm.weight": "model-00003-of-00007.safetensors",
    "model.layers.12.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.12.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
    "model.layers.12.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.12.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.12.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.13.input_layernorm.weight": "model-00004-of-00007.safetensors",
    "model.layers.13.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.13.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
    "model.layers.13.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.13.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.13.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.13.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.13.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.14.input_layernorm.weight": "model-00004-of-00007.safetensors",
    "model.layers.14.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.14.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
    "model.layers.14.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.14.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.14.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.15.input_layernorm.weight": "model-00004-of-00007.safetensors",
    "model.layers.15.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.15.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
    "model.layers.15.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.15.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.15.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.16.input_layernorm.weight": "model-00004-of-00007.safetensors",
    "model.layers.16.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.16.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
    "model.layers.16.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.16.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.16.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.16.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.16.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.17.input_layernorm.weight": "model-00004-of-00007.safetensors",
    "model.layers.17.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.17.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.17.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.17.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
    "model.layers.17.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.17.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.17.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.18.input_layernorm.weight": "model-00005-of-00007.safetensors",
    "model.layers.18.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.18.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.18.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
    "model.layers.18.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.18.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.18.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.18.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.18.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.18.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
    "model.layers.18.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
    "model.layers.19.input_layernorm.weight": "model-00005-of-00007.safetensors",
    "model.layers.19.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.19.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.19.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
    "model.layers.19.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.19.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.19.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.19.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.19.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.19.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.19.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.2.input_layernorm.weight": "model-00001-of-00007.safetensors",
    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
    "model.layers.20.input_layernorm.weight": "model-00005-of-00007.safetensors",
    "model.layers.20.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.20.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.20.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.20.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
    "model.layers.20.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.20.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.20.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.20.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.20.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.20.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.20.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.21.input_layernorm.weight": "model-00005-of-00007.safetensors",
    "model.layers.21.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.21.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.21.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.21.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
    "model.layers.21.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.21.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.21.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.21.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.21.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.21.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.21.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.22.input_layernorm.weight": "model-00005-of-00007.safetensors",
    "model.layers.22.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.22.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.22.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.22.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
    "model.layers.22.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.22.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.22.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.22.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.22.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.22.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.22.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.23.input_layernorm.weight": "model-00005-of-00007.safetensors",
    "model.layers.23.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.23.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.23.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.23.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
    "model.layers.23.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.23.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.23.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.23.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.23.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.23.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.23.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.24.input_layernorm.weight": "model-00006-of-00007.safetensors",
    "model.layers.24.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.24.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.24.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.24.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
    "model.layers.24.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.24.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.24.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.24.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.24.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.24.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
    "model.layers.24.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
    "model.layers.25.input_layernorm.weight": "model-00006-of-00007.safetensors",
    "model.layers.25.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.25.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.25.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.25.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
    "model.layers.25.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
    "model.layers.25.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.25.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.25.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
    "model.layers.25.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.25.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
    "model.layers.25.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.26.input_layernorm.weight": "model-00006-of-00007.safetensors",
    "model.layers.26.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.26.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.26.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.26.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
    "model.layers.26.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
    "model.layers.26.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.26.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.26.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
    "model.layers.26.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.26.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
    "model.layers.26.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.27.input_layernorm.weight": "model-00006-of-00007.safetensors",
    "model.layers.27.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.27.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.27.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.27.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
    "model.layers.27.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
    "model.layers.27.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.27.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.27.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
    "model.layers.27.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.27.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
    "model.layers.27.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
    "model.layers.3.input_layernorm.weight": "model-00002-of-00007.safetensors",
    "model.layers.3.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.3.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.3.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
    "model.layers.3.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.3.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.3.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.3.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.3.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.3.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.3.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.4.input_layernorm.weight": "model-00002-of-00007.safetensors",
    "model.layers.4.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.4.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.4.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
    "model.layers.4.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.4.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.4.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.4.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.5.input_layernorm.weight": "model-00002-of-00007.safetensors",
    "model.layers.5.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.5.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
    "model.layers.5.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.5.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.5.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.5.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.5.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.5.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.6.input_layernorm.weight": "model-00002-of-00007.safetensors",
    "model.layers.6.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.6.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
    "model.layers.6.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.6.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.6.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.7.input_layernorm.weight": "model-00002-of-00007.safetensors",
    "model.layers.7.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.7.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
    "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.8.input_layernorm.weight": "model-00003-of-00007.safetensors",
    "model.layers.8.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.8.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.8.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.8.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
    "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
    "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
    "model.layers.9.input_layernorm.weight": "model-00003-of-00007.safetensors",
    "model.layers.9.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.9.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
    "model.layers.9.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.9.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
    "model.layers.9.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
    "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
    "model.norm.weight": "model-00006-of-00007.safetensors"
  }
}
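Note: the weight_map above spreads 339 tensors (12 per layer x 28 layers, plus lm_head, embed_tokens, and the final norm) across seven shards. A sketch, standard library only, to tally tensors per shard and to compare total_size against the float32 dtype declared in config.json; the metadata's total_parameters value does not match total_size / 4 here, so both are printed for inspection:

import json
from collections import Counter

CKPT = "qwen2_5_math_7b/limo/checkpoint-1230"

with open(f"{CKPT}/model.safetensors.index.json") as f:
    index = json.load(f)

# Tensors per shard, e.g. model-00001-of-00007.safetensors -> N tensors.
for shard, n in sorted(Counter(index["weight_map"].values()).items()):
    print(shard, n)

# total_size is in bytes; at 4 bytes/param (float32, per config.json) this
# implies ~7.6e9 parameters. Compare against metadata["total_parameters"].
meta = index["metadata"]
print("params implied by size:", meta["total_size"] // 4)
print("metadata total_parameters:", meta["total_parameters"])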
qwen2_5_math_7b/limo/checkpoint-1230/special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
{
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
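Note: this map sets the tokenizer-side eos_token to <|im_end|> (ID 151645 per added_tokens.json), while config.json and generation_config.json set eos_token_id to 151643 (<|endoftext|>); which one stops generation depends on how the checkpoint is loaded, so it is worth checking at inference time. A sketch, standard library only, to surface both settings:

import json

CKPT = "qwen2_5_math_7b/limo/checkpoint-1230"

# Tokenizer-side special tokens (this file).
with open(f"{CKPT}/special_tokens_map.json") as f:
    stm = json.load(f)
# Model-side generation defaults.
with open(f"{CKPT}/generation_config.json") as f:
    gen = json.load(f)

print("tokenizer eos:", stm["eos_token"]["content"])    # <|im_end|>
print("tokenizer pad:", stm["pad_token"]["content"])    # <|endoftext|>
print("generation eos_token_id:", gen["eos_token_id"])  # 151643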