KublaiKhan1 committed
Commit dae8088 (verified) · 1 parent: 8f4ada1

Delete qwen2_5_7b_instruct

This view is limited to 50 files because it contains too many changes. See raw diff for the full change set.
Files changed (50):
  1. qwen2_5_7b_instruct/limo/checkpoint-205/added_tokens.json +0 -24
  2. qwen2_5_7b_instruct/limo/checkpoint-205/chat_template.jinja +0 -54
  3. qwen2_5_7b_instruct/limo/checkpoint-205/config.json +0 -58
  4. qwen2_5_7b_instruct/limo/checkpoint-205/generation_config.json +0 -7
  5. qwen2_5_7b_instruct/limo/checkpoint-205/merges.txt +0 -0
  6. qwen2_5_7b_instruct/limo/checkpoint-205/model-00001-of-00007.safetensors +0 -3
  7. qwen2_5_7b_instruct/limo/checkpoint-205/model-00002-of-00007.safetensors +0 -3
  8. qwen2_5_7b_instruct/limo/checkpoint-205/model-00003-of-00007.safetensors +0 -3
  9. qwen2_5_7b_instruct/limo/checkpoint-205/model-00004-of-00007.safetensors +0 -3
  10. qwen2_5_7b_instruct/limo/checkpoint-205/model-00005-of-00007.safetensors +0 -3
  11. qwen2_5_7b_instruct/limo/checkpoint-205/model-00006-of-00007.safetensors +0 -3
  12. qwen2_5_7b_instruct/limo/checkpoint-205/model-00007-of-00007.safetensors +0 -3
  13. qwen2_5_7b_instruct/limo/checkpoint-205/model.safetensors.index.json +0 -347
  14. qwen2_5_7b_instruct/limo/checkpoint-205/rng_state_0.pth +0 -3
  15. qwen2_5_7b_instruct/limo/checkpoint-205/rng_state_1.pth +0 -3
  16. qwen2_5_7b_instruct/limo/checkpoint-205/rng_state_2.pth +0 -3
  17. qwen2_5_7b_instruct/limo/checkpoint-205/rng_state_3.pth +0 -3
  18. qwen2_5_7b_instruct/limo/checkpoint-205/scheduler.pt +0 -3
  19. qwen2_5_7b_instruct/limo/checkpoint-205/special_tokens_map.json +0 -31
  20. qwen2_5_7b_instruct/limo/checkpoint-205/tokenizer.json +0 -3
  21. qwen2_5_7b_instruct/limo/checkpoint-205/tokenizer_config.json +0 -208
  22. qwen2_5_7b_instruct/limo/checkpoint-205/trainer_state.json +0 -1469
  23. qwen2_5_7b_instruct/limo/checkpoint-205/vocab.json +0 -0
  24. qwen2_5_7b_instruct/limo/checkpoint-410/added_tokens.json +0 -24
  25. qwen2_5_7b_instruct/limo/checkpoint-410/chat_template.jinja +0 -54
  26. qwen2_5_7b_instruct/limo/checkpoint-410/config.json +0 -58
  27. qwen2_5_7b_instruct/limo/checkpoint-410/generation_config.json +0 -7
  28. qwen2_5_7b_instruct/limo/checkpoint-410/merges.txt +0 -0
  29. qwen2_5_7b_instruct/limo/checkpoint-410/model-00001-of-00007.safetensors +0 -3
  30. qwen2_5_7b_instruct/limo/checkpoint-410/model-00002-of-00007.safetensors +0 -3
  31. qwen2_5_7b_instruct/limo/checkpoint-410/model-00003-of-00007.safetensors +0 -3
  32. qwen2_5_7b_instruct/limo/checkpoint-410/model-00004-of-00007.safetensors +0 -3
  33. qwen2_5_7b_instruct/limo/checkpoint-410/model-00005-of-00007.safetensors +0 -3
  34. qwen2_5_7b_instruct/limo/checkpoint-410/model-00006-of-00007.safetensors +0 -3
  35. qwen2_5_7b_instruct/limo/checkpoint-410/model-00007-of-00007.safetensors +0 -3
  36. qwen2_5_7b_instruct/limo/checkpoint-410/model.safetensors.index.json +0 -347
  37. qwen2_5_7b_instruct/limo/checkpoint-410/rng_state_0.pth +0 -3
  38. qwen2_5_7b_instruct/limo/checkpoint-410/rng_state_1.pth +0 -3
  39. qwen2_5_7b_instruct/limo/checkpoint-410/rng_state_2.pth +0 -3
  40. qwen2_5_7b_instruct/limo/checkpoint-410/rng_state_3.pth +0 -3
  41. qwen2_5_7b_instruct/limo/checkpoint-410/scheduler.pt +0 -3
  42. qwen2_5_7b_instruct/limo/checkpoint-410/special_tokens_map.json +0 -31
  43. qwen2_5_7b_instruct/limo/checkpoint-410/tokenizer.json +0 -3
  44. qwen2_5_7b_instruct/limo/checkpoint-410/tokenizer_config.json +0 -208
  45. qwen2_5_7b_instruct/limo/checkpoint-410/trainer_state.json +0 -2904
  46. qwen2_5_7b_instruct/limo/checkpoint-410/vocab.json +0 -0
  47. qwen2_5_7b_instruct/limo/checkpoint-615/added_tokens.json +0 -24
  48. qwen2_5_7b_instruct/limo/checkpoint-615/chat_template.jinja +0 -54
  49. qwen2_5_7b_instruct/limo/checkpoint-615/config.json +0 -58
  50. qwen2_5_7b_instruct/limo/checkpoint-615/generation_config.json +0 -7
qwen2_5_7b_instruct/limo/checkpoint-205/added_tokens.json DELETED
@@ -1,24 +0,0 @@
- {
-   "</tool_call>": 151658,
-   "<tool_call>": 151657,
-   "<|box_end|>": 151649,
-   "<|box_start|>": 151648,
-   "<|endoftext|>": 151643,
-   "<|file_sep|>": 151664,
-   "<|fim_middle|>": 151660,
-   "<|fim_pad|>": 151662,
-   "<|fim_prefix|>": 151659,
-   "<|fim_suffix|>": 151661,
-   "<|im_end|>": 151645,
-   "<|im_start|>": 151644,
-   "<|image_pad|>": 151655,
-   "<|object_ref_end|>": 151647,
-   "<|object_ref_start|>": 151646,
-   "<|quad_end|>": 151651,
-   "<|quad_start|>": 151650,
-   "<|repo_name|>": 151663,
-   "<|video_pad|>": 151656,
-   "<|vision_end|>": 151653,
-   "<|vision_pad|>": 151654,
-   "<|vision_start|>": 151652
- }
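For orientation, a minimal sketch of verifying the added-token IDs above with transformers; it assumes the upstream Qwen/Qwen2.5-7B-Instruct tokenizer, which carries the same vocabulary as this deleted checkpoint:

```python
# Sketch: confirm the added special-token IDs listed in added_tokens.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-7B-Instruct")
for token, expected_id in [("<|im_start|>", 151644), ("<|im_end|>", 151645),
                           ("<tool_call>", 151657), ("</tool_call>", 151658)]:
    assert tok.convert_tokens_to_ids(token) == expected_id
```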
qwen2_5_7b_instruct/limo/checkpoint-205/chat_template.jinja DELETED
@@ -1,54 +0,0 @@
- {%- if tools %}
-     {{- '<|im_start|>system\n' }}
-     {%- if messages[0]['role'] == 'system' %}
-         {{- messages[0]['content'] }}
-     {%- else %}
-         {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
-     {%- endif %}
-     {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
-     {%- for tool in tools %}
-         {{- "\n" }}
-         {{- tool | tojson }}
-     {%- endfor %}
-     {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
- {%- else %}
-     {%- if messages[0]['role'] == 'system' %}
-         {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
-     {%- else %}
-         {{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }}
-     {%- endif %}
- {%- endif %}
- {%- for message in messages %}
-     {%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
-         {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
-     {%- elif message.role == "assistant" %}
-         {{- '<|im_start|>' + message.role }}
-         {%- if message.content %}
-             {{- '\n' + message.content }}
-         {%- endif %}
-         {%- for tool_call in message.tool_calls %}
-             {%- if tool_call.function is defined %}
-                 {%- set tool_call = tool_call.function %}
-             {%- endif %}
-             {{- '\n<tool_call>\n{"name": "' }}
-             {{- tool_call.name }}
-             {{- '", "arguments": ' }}
-             {{- tool_call.arguments | tojson }}
-             {{- '}\n</tool_call>' }}
-         {%- endfor %}
-         {{- '<|im_end|>\n' }}
-     {%- elif message.role == "tool" %}
-         {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
-             {{- '<|im_start|>user' }}
-         {%- endif %}
-         {{- '\n<tool_response>\n' }}
-         {{- message.content }}
-         {{- '\n</tool_response>' }}
-         {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
-             {{- '<|im_end|>\n' }}
-         {%- endif %}
-     {%- endif %}
- {%- endfor %}
- {%- if add_generation_prompt %}
-     {{- '<|im_start|>assistant\n' }}
- {%- endif %}
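The deleted template is the standard ChatML-style Qwen2.5 flow: a default system prompt when none is supplied, `<tool_call>` JSON wrapping for assistant tool calls, `<tool_response>` grouping for tool results, and an optional trailing assistant header. A minimal sketch of rendering it, again assuming the upstream tokenizer with the same template family:

```python
# Sketch: render the chat template without tokenizing.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-7B-Instruct")
messages = [{"role": "user", "content": "What is 2 + 2?"}]
text = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(text)
# <|im_start|>system
# You are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>
# <|im_start|>user
# What is 2 + 2?<|im_end|>
# <|im_start|>assistant
```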
qwen2_5_7b_instruct/limo/checkpoint-205/config.json DELETED
@@ -1,58 +0,0 @@
- {
-   "architectures": [
-     "Qwen2ForCausalLM"
-   ],
-   "attention_dropout": 0.0,
-   "bos_token_id": 151643,
-   "eos_token_id": 151645,
-   "hidden_act": "silu",
-   "hidden_size": 3584,
-   "initializer_range": 0.02,
-   "intermediate_size": 18944,
-   "layer_types": [
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention",
-     "full_attention"
-   ],
-   "max_position_embeddings": 32768,
-   "max_window_layers": 28,
-   "model_type": "qwen2",
-   "num_attention_heads": 28,
-   "num_hidden_layers": 28,
-   "num_key_value_heads": 4,
-   "rms_norm_eps": 1e-06,
-   "rope_scaling": null,
-   "rope_theta": 1000000.0,
-   "sliding_window": null,
-   "tie_word_embeddings": false,
-   "torch_dtype": "float32",
-   "transformers_version": "4.55.0",
-   "use_cache": false,
-   "use_sliding_window": false,
-   "vocab_size": 152064
- }
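The config pins the 7B geometry (28 full-attention layers, hidden size 3584, grouped-query attention with 28 query heads over 4 KV heads) and records that this checkpoint was saved in float32 with use_cache disabled for training. A small sketch of rebuilding the same architecture object from the values above:

```python
# Sketch: reconstruct the architecture config from the values in the diff above.
from transformers import Qwen2Config

cfg = Qwen2Config(
    hidden_size=3584, intermediate_size=18944, num_hidden_layers=28,
    num_attention_heads=28, num_key_value_heads=4, vocab_size=152064,
    max_position_embeddings=32768, rope_theta=1000000.0, rms_norm_eps=1e-6,
)
head_dim = cfg.hidden_size // cfg.num_attention_heads      # 128
kv_groups = cfg.num_attention_heads // cfg.num_key_value_heads  # 7 query heads per KV head
```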
qwen2_5_7b_instruct/limo/checkpoint-205/generation_config.json DELETED
@@ -1,7 +0,0 @@
- {
-   "_from_model_config": true,
-   "bos_token_id": 151643,
-   "eos_token_id": 151645,
-   "transformers_version": "4.55.0",
-   "use_cache": false
- }
qwen2_5_7b_instruct/limo/checkpoint-205/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
qwen2_5_7b_instruct/limo/checkpoint-205/model-00001-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:73777262838c632161e2dd7e94d6778e7f788163394d5f8ebcc520f3f23735bf
- size 4976687216
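This three-line stub, like the ones that follow, is a Git LFS pointer: the repository stores only the spec version, a sha256 object ID, and the byte size, while the ~4.98 GB shard itself lives in LFS storage. A minimal sketch of parsing such a pointer:

```python
# Sketch: parse a Git LFS pointer file into its three fields.
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:73777262838c632161e2dd7e94d6778e7f788163394d5f8ebcc520f3f23735bf
size 4976687216"""
assert parse_lfs_pointer(pointer)["size_bytes"] == 4_976_687_216  # ~4.98 GB
```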
qwen2_5_7b_instruct/limo/checkpoint-205/model-00002-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:f37cfafe4e28a97d86378857f0a2c9279eb1d834d58c38f9532126650013cf01
- size 4778622352
qwen2_5_7b_instruct/limo/checkpoint-205/model-00003-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:c16b67f2c440ba9e48b542d6fb8e2a2d7314aafdf5c5e84d24d4a4f28b9fa649
- size 4932743960
qwen2_5_7b_instruct/limo/checkpoint-205/model-00004-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:5eb92011bc553a0d2d3632065b18f04dce95da7d1756a5223ee2d3dc8076025b
- size 4932743992
qwen2_5_7b_instruct/limo/checkpoint-205/model-00005-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:1b098a7099babb3aca0fa240e6780a7a96275e5785261544fdbe8c8f5845ef1c
- size 4998852296
qwen2_5_7b_instruct/limo/checkpoint-205/model-00006-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:2d13e4625ed1f92df939a8f0b972735d8ad58dfc8498fd520b0bd64cfdb9af3f
- size 3662865184
qwen2_5_7b_instruct/limo/checkpoint-205/model-00007-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:e2fc618cbeb9f42ae86b4dc8efcb23f7af1a7c3d308771d9459873167d5b5030
- size 2179989632
qwen2_5_7b_instruct/limo/checkpoint-205/model.safetensors.index.json DELETED
(347 lines deleted; the sharding index is summarized below rather than reproduced line by line.)

metadata: total_parameters 1903904128, total_size 30462466048 bytes.

weight_map (each of the 28 decoder layers contributes input_layernorm.weight, post_attention_layernorm.weight, mlp.{gate,up,down}_proj.weight, self_attn.{q,k,v}_proj.{weight,bias}, and self_attn.o_proj.weight):

| Shard | Tensors |
|---|---|
| model-00001-of-00007 | model.embed_tokens.weight; layers 0–2 (all tensors) |
| model-00002-of-00007 | layers 3–7 (all tensors); layer 8 self_attn.* |
| model-00003-of-00007 | layer 8 layernorms and mlp.*; layers 9–12 (all tensors); layer 13 mlp.gate_proj and self_attn.* |
| model-00004-of-00007 | layer 13 layernorms, mlp.down_proj, and mlp.up_proj; layers 14–17 (all tensors); layer 18 mlp.gate_proj, mlp.up_proj, and self_attn.* |
| model-00005-of-00007 | layer 18 layernorms and mlp.down_proj; layers 19–23 (all tensors); layer 24 self_attn.{q,k,v}_proj weights and biases |
| model-00006-of-00007 | layer 24 self_attn.o_proj, layernorms, and mlp.*; layers 25–27 (all tensors); model.norm.weight |
| model-00007-of-00007 | lm_head.weight |
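total_size / 4 bytes ≈ 7.6B float32 parameters, which matches the Qwen2.5-7B geometry in config.json; the recorded total_parameters value is exactly total_size / 16 and looks like a counting artifact of the sharded save. The weight_map is what sharded loaders follow to find each tensor; a minimal sketch, assuming the shard files sit next to the index:

```python
# Sketch: load a sharded safetensors checkpoint by following weight_map.
import json
from collections import defaultdict
from safetensors.torch import load_file

with open("model.safetensors.index.json") as f:
    index = json.load(f)

shard_to_tensors = defaultdict(list)
for tensor_name, shard_file in index["weight_map"].items():
    shard_to_tensors[shard_file].append(tensor_name)

state_dict = {}
for shard_file, names in shard_to_tensors.items():
    shard = load_file(shard_file)  # one multi-GB file at a time
    state_dict.update({name: shard[name] for name in names})

assert len(state_dict) == len(index["weight_map"])
```

Note that the on-disk shard sizes in the LFS pointers sum to slightly more than total_size, since each safetensors file carries its own JSON header in addition to the raw tensor bytes.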
qwen2_5_7b_instruct/limo/checkpoint-205/rng_state_0.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:92cc13315f24c28015d695b6cde08bb1cd6fea4cbc435998485ed6fbe4c91285
- size 15024
qwen2_5_7b_instruct/limo/checkpoint-205/rng_state_1.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:f4c154b6a63e0b1f98f7d2847944398f99f1657d35e8eddf7fdf0ae2c24b0552
- size 15024
qwen2_5_7b_instruct/limo/checkpoint-205/rng_state_2.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:f784c6a9507b51189f2caffbd178ea9882103b75852e31c15f47fdae6a43af1d
- size 15024
qwen2_5_7b_instruct/limo/checkpoint-205/rng_state_3.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:34b023e05bc2d12b91dc436d4922b990d50ec8dc56d40dc3e36b3bb34fc81341
- size 15024
qwen2_5_7b_instruct/limo/checkpoint-205/scheduler.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a16163f34db040e2b0bd4461bd027c667560718df79714f98787d5971da9120b
- size 1064
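The four rng_state_N.pth files hold per-rank random-number-generator state (four ranks, consistent with a 4-GPU run), and scheduler.pt holds the learning-rate-scheduler state; the Trainer restores both when resuming from this checkpoint. A small sketch of inspecting them as the plain torch pickles they are:

```python
# Sketch: inspect resume-state files saved alongside the model shards.
import torch

sched = torch.load("scheduler.pt", map_location="cpu")
print(sorted(sched.keys()))  # scheduler state_dicts typically include 'last_epoch' and '_last_lr'

rng = torch.load("rng_state_0.pth", map_location="cpu", weights_only=False)
print(type(rng))  # per-rank RNG snapshot for reproducible shuffling and dropout
```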
qwen2_5_7b_instruct/limo/checkpoint-205/special_tokens_map.json DELETED
@@ -1,31 +0,0 @@
- {
-   "additional_special_tokens": [
-     "<|im_start|>",
-     "<|im_end|>",
-     "<|object_ref_start|>",
-     "<|object_ref_end|>",
-     "<|box_start|>",
-     "<|box_end|>",
-     "<|quad_start|>",
-     "<|quad_end|>",
-     "<|vision_start|>",
-     "<|vision_end|>",
-     "<|vision_pad|>",
-     "<|image_pad|>",
-     "<|video_pad|>"
-   ],
-   "eos_token": {
-     "content": "<|im_end|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "pad_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   }
- }
qwen2_5_7b_instruct/limo/checkpoint-205/tokenizer.json DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
- size 11421896
qwen2_5_7b_instruct/limo/checkpoint-205/tokenizer_config.json DELETED
(208 lines deleted; the settings are summarized below rather than reproduced line by line.)

Top-level settings: add_bos_token false, add_prefix_space false, bos_token null, clean_up_tokenization_spaces false, eos_token "<|im_end|>", errors "replace", extra_special_tokens {}, model_max_length 131072, pad_token "<|endoftext|>", padding_side "right", split_special_tokens false, tokenizer_class "Qwen2Tokenizer", unk_token null.

additional_special_tokens: <|im_start|>, <|im_end|>, <|object_ref_start|>, <|object_ref_end|>, <|box_start|>, <|box_end|>, <|quad_start|>, <|quad_end|>, <|vision_start|>, <|vision_end|>, <|vision_pad|>, <|image_pad|>, <|video_pad|>.

added_tokens_decoder (every entry sets lstrip, normalized, rstrip, and single_word to false):
- special: true — 151643 <|endoftext|>, 151644 <|im_start|>, 151645 <|im_end|>, 151646 <|object_ref_start|>, 151647 <|object_ref_end|>, 151648 <|box_start|>, 151649 <|box_end|>, 151650 <|quad_start|>, 151651 <|quad_end|>, 151652 <|vision_start|>, 151653 <|vision_end|>, 151654 <|vision_pad|>, 151655 <|image_pad|>, 151656 <|video_pad|>
- special: false — 151657 <tool_call>, 151658 </tool_call>, 151659 <|fim_prefix|>, 151660 <|fim_middle|>, 151661 <|fim_suffix|>, 151662 <|fim_pad|>, 151663 <|repo_name|>, 151664 <|file_sep|>
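Two of these settings matter most when reproducing the fine-tune: padding_side "right" (the usual choice for supervised fine-tuning so padding never precedes content) and model_max_length 131072. A minimal spot-check, assuming the upstream tokenizer ships the same configuration:

```python
# Sketch: spot-check the recorded tokenizer settings after loading.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-7B-Instruct")
print(tok.model_max_length)  # expected 131072 per the config above
print(tok.pad_token)         # expected <|endoftext|>
print(tok.eos_token)         # expected <|im_end|>
```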
qwen2_5_7b_instruct/limo/checkpoint-205/trainer_state.json DELETED
(1469 lines deleted; the diff view cuts off partway through the file, so only the visible portion is summarized below.)

Header fields: best_global_step null, best_metric null, best_model_checkpoint null, epoch 1.0, eval_steps 500, global_step 205, is_hyper_param_search false, is_local_process_zero true, is_world_process_zero true.

log_history (one entry per optimizer step; visible through step 70):

| step | epoch | grad_norm | learning_rate | loss |
|---|---|---|---|---|
| 1 | 0.004878048780487805 | 6.251913070678711 | 5e-06 | 0.7742 |
| 2 | 0.00975609756097561 | 6.38546085357666 | 4.999997064365715e-06 | 1.0365 |
| 3 | 0.014634146341463415 | 3.223087787628174 | 4.999988257469751e-06 | 0.6448 |
| 4 | 0.01951219512195122 | 2.750905752182007 | 4.999973579332793e-06 | 0.6328 |
| 5 | 0.024390243902439025 | 4.2081828117370605 | 4.999953029989312e-06 | 0.6984 |
| 6 | 0.02926829268292683 | 4.167655944824219 | 4.999926609487568e-06 | 0.9847 |
| 7 | 0.03414634146341464 | 4.408457279205322 | 4.9998943178896106e-06 | 0.8203 |
| 8 | 0.03902439024390244 | 3.1360676288604736 | 4.999856155271276e-06 | 0.6513 |
| 9 | 0.04390243902439024 | 3.3297390937805176 | 4.999812121722191e-06 | 0.7803 |
| 10 | 0.04878048780487805 | 3.5323646068573 | 4.999762217345766e-06 | 0.7527 |
| 11 | 0.05365853658536585 | 1.9139900207519531 | 4.999706442259205e-06 | 0.5721 |
| 12 | 0.05853658536585366 | 2.2962052822113037 | 4.999644796593492e-06 | 0.6354 |
| 13 | 0.06341463414634146 | 1.8342329263687134 | 4.999577280493407e-06 | 0.5259 |
| 14 | 0.06829268292682927 | 2.8183422088623047 | 4.99950389411751e-06 | 0.8089 |
| 15 | 0.07317073170731707 | 2.2182018756866455 | 4.999424637638148e-06 | 0.6002 |
| 16 | 0.07804878048780488 | 2.4346132278442383 | 4.999339511241458e-06 | 0.6651 |
| 17 | 0.08292682926829269 | 1.9976483583450317 | 4.9992485151273584e-06 | 0.655 |
| 18 | 0.08780487804878048 | 2.1938087940216064 | 4.999151649509554e-06 | 0.5523 |
| 19 | 0.09268292682926829 | 2.5118227005004883 | 4.9990489146155356e-06 | 0.8695 |
| 20 | 0.0975609756097561 | 2.111804723739624 | 4.9989403106865765e-06 | 0.8075 |
| 21 | 0.1024390243902439 | 2.2701148986816406 | 4.9988258379777334e-06 | 0.6715 |
| 22 | 0.1073170731707317 | 2.2692644596099854 | 4.998705496757846e-06 | 0.7627 |
| 23 | 0.11219512195121951 | 1.8187084197998047 | 4.998579287309538e-06 | 0.9055 |
| 24 | 0.11707317073170732 | 1.498978614807129 | 4.998447209929211e-06 | 0.4455 |
| 25 | 0.12195121951219512 | 2.2440743446350098 | 4.998309264927053e-06 | 0.4843 |
| 26 | 0.12682926829268293 | 1.8050298690795898 | 4.998165452627025e-06 | 0.6016 |
| 27 | 0.13170731707317074 | 1.9306098222732544 | 4.998015773366874e-06 | 0.5469 |
| 28 | 0.13658536585365855 | 2.152693510055542 | 4.997860227498122e-06 | 0.5583 |
| 29 | 0.14146341463414633 | 2.3152549266815186 | 4.99769881538607e-06 | 0.6947 |
| 30 | 0.14634146341463414 | 1.830079197883606 | 4.997531537409794e-06 | 0.6863 |
| 31 | 0.15121951219512195 | 2.574989080429077 | 4.99735839396215e-06 | 0.6081 |
| 32 | 0.15609756097560976 | 1.9973617792129517 | 4.9971793854497655e-06 | 0.5624 |
| 33 | 0.16097560975609757 | 1.7976499795913696 | 4.996994512293042e-06 | 0.514 |
| 34 | 0.16585365853658537 | 1.865333914756775 | 4.996803774926157e-06 | 0.5389 |
| 35 | 0.17073170731707318 | 2.7497925758361816 | 4.996607173797059e-06 | 1.092 |
| 36 | 0.17560975609756097 | 2.1576943397521973 | 4.996404709367466e-06 | 0.6338 |
| 37 | 0.18048780487804877 | 1.9310261011123657 | 4.996196382112868e-06 | 0.5288 |
| 38 | 0.18536585365853658 | 2.566770553588867 | 4.9959821925225235e-06 | 0.7568 |
| 39 | 0.1902439024390244 | 1.8456333875656128 | 4.995762141099456e-06 | 0.6288 |
| 40 | 0.1951219512195122 | 2.9497272968292236 | 4.995536228360461e-06 | 0.8085 |
| 41 | 0.2 | 2.113572359085083 | 4.995304454836095e-06 | 0.7446 |
| 42 | 0.2048780487804878 | 2.083073139190674 | 4.9950668210706795e-06 | 0.61 |
| 43 | 0.2097560975609756 | 1.7291383743286133 | 4.994823327622299e-06 | 0.4961 |
| 44 | 0.2146341463414634 | 2.4040563106536865 | 4.9945739750628e-06 | 0.6091 |
| 45 | 0.21951219512195122 | 1.6997170448303223 | 4.994318763977789e-06 | 0.6408 |
| 46 | 0.22439024390243903 | 1.5265686511993408 | 4.994057694966632e-06 | 0.4422 |
| 47 | 0.22926829268292684 | 1.6987519264221191 | 4.993790768642449e-06 | 0.8084 |
| 48 | 0.23414634146341465 | 1.9533758163452148 | 4.99351798563212e-06 | 0.9267 |
| 49 | 0.23902439024390243 | 2.0147147178649902 | 4.993239346576278e-06 | 0.7258 |
| 50 | 0.24390243902439024 | 1.7322336435317993 | 4.99295485212931e-06 | 0.7664 |
| 51 | 0.24878048780487805 | 1.972558856010437 | 4.992664502959351e-06 | 0.6476 |
| 52 | 0.25365853658536586 | 2.168142557144165 | 4.99236829974829e-06 | 0.6274 |
| 53 | 0.25853658536585367 | 2.1706929206848145 | 4.992066243191762e-06 | 0.8237 |
| 54 | 0.2634146341463415 | 1.7400177717208862 | 4.991758333999148e-06 | 0.6377 |
| 55 | 0.2682926829268293 | 2.02597713470459 | 4.991444572893575e-06 | 0.5368 |
| 56 | 0.2731707317073171 | 1.9592632055282593 | 4.991124960611916e-06 | 0.4826 |
| 57 | 0.2780487804878049 | 1.6813147068023682 | 4.99079949790478e-06 | 0.5786 |
| 58 | 0.28292682926829266 | 1.5665404796600342 | 4.99046818553652e-06 | 0.6276 |
| 59 | 0.28780487804878047 | 1.9345953464508057 | 4.9901310242852246e-06 | 0.5943 |
| 60 | 0.2926829268292683 | 1.4303550720214844 | 4.9897880149427206e-06 | 0.5156 |
| 61 | 0.2975609756097561 | 2.3535423278808594 | 4.989439158314566e-06 | 0.6139 |
| 62 | 0.3024390243902439 | 1.617553949356079 | 4.989084455220056e-06 | 0.4543 |
| 63 | 0.3073170731707317 | 2.2876179218292236 | 4.988723906492212e-06 | 0.8291 |
| 64 | 0.3121951219512195 | 1.781219720840454 | 4.988357512977785e-06 | 0.4443 |
| 65 | 0.3170731707317073 | 1.8858840465545654 | 4.987985275537252e-06 | 0.4885 |
| 66 | 0.32195121951219513 | 1.685311198234558 | 4.9876071950448185e-06 | 0.6638 |
| 67 | 0.32682926829268294 | 1.711588740348816 | 4.987223272388407e-06 | 0.4916 |
| 68 | 0.33170731707317075 | 1.9743118286132812 | 4.986833508469663e-06 | 0.837 |
| 69 | 0.33658536585365856 | 1.3840869665145874 | 4.98643790420395e-06 | 0.4716 |
| 70 | 0.34146341463414637 | 2.4423623085021973 | 4.986036460520348e-06 | 0.7045 |

The view ends inside the next entry, of which only epoch 0.3463414634146341 and grad_norm 1.9470542669296265 are visible.
505
- "learning_rate": 4.98562917836165e-06,
506
- "loss": 0.6841,
507
- "step": 71
508
- },
509
- {
510
- "epoch": 0.35121951219512193,
511
- "grad_norm": 1.357690691947937,
512
- "learning_rate": 4.985216058684362e-06,
513
- "loss": 0.4278,
514
- "step": 72
515
- },
516
- {
517
- "epoch": 0.35609756097560974,
518
- "grad_norm": 1.6999742984771729,
519
- "learning_rate": 4.984797102458697e-06,
520
- "loss": 0.8335,
521
- "step": 73
522
- },
523
- {
524
- "epoch": 0.36097560975609755,
525
- "grad_norm": 2.0041823387145996,
526
- "learning_rate": 4.984372310668579e-06,
527
- "loss": 0.5316,
528
- "step": 74
529
- },
530
- {
531
- "epoch": 0.36585365853658536,
532
- "grad_norm": 1.8468217849731445,
533
- "learning_rate": 4.983941684311633e-06,
534
- "loss": 1.0183,
535
- "step": 75
536
- },
537
- {
538
- "epoch": 0.37073170731707317,
539
- "grad_norm": 1.5536184310913086,
540
- "learning_rate": 4.983505224399188e-06,
541
- "loss": 0.4932,
542
- "step": 76
543
- },
544
- {
545
- "epoch": 0.375609756097561,
546
- "grad_norm": 1.3191509246826172,
547
- "learning_rate": 4.983062931956275e-06,
548
- "loss": 0.5752,
549
- "step": 77
550
- },
551
- {
552
- "epoch": 0.3804878048780488,
553
- "grad_norm": 1.9427785873413086,
554
- "learning_rate": 4.9826148080216195e-06,
555
- "loss": 0.6428,
556
- "step": 78
557
- },
558
- {
559
- "epoch": 0.3853658536585366,
560
- "grad_norm": 2.217280387878418,
561
- "learning_rate": 4.9821608536476445e-06,
562
- "loss": 1.0444,
563
- "step": 79
564
- },
565
- {
566
- "epoch": 0.3902439024390244,
567
- "grad_norm": 1.7984737157821655,
568
- "learning_rate": 4.981701069900465e-06,
569
- "loss": 0.5578,
570
- "step": 80
571
- },
572
- {
573
- "epoch": 0.3951219512195122,
574
- "grad_norm": 1.859869122505188,
575
- "learning_rate": 4.9812354578598876e-06,
576
- "loss": 0.5875,
577
- "step": 81
578
- },
579
- {
580
- "epoch": 0.4,
581
- "grad_norm": 1.8699328899383545,
582
- "learning_rate": 4.980764018619405e-06,
583
- "loss": 0.6775,
584
- "step": 82
585
- },
586
- {
587
- "epoch": 0.40487804878048783,
588
- "grad_norm": 1.6898409128189087,
589
- "learning_rate": 4.980286753286196e-06,
590
- "loss": 0.6081,
591
- "step": 83
592
- },
593
- {
594
- "epoch": 0.4097560975609756,
595
- "grad_norm": 1.5678975582122803,
596
- "learning_rate": 4.97980366298112e-06,
597
- "loss": 0.5573,
598
- "step": 84
599
- },
600
- {
601
- "epoch": 0.4146341463414634,
602
- "grad_norm": 1.4087779521942139,
603
- "learning_rate": 4.97931474883872e-06,
604
- "loss": 0.6142,
605
- "step": 85
606
- },
607
- {
608
- "epoch": 0.4195121951219512,
609
- "grad_norm": 1.8441438674926758,
610
- "learning_rate": 4.978820012007213e-06,
611
- "loss": 0.6677,
612
- "step": 86
613
- },
614
- {
615
- "epoch": 0.424390243902439,
616
- "grad_norm": 1.7426705360412598,
617
- "learning_rate": 4.978319453648495e-06,
618
- "loss": 0.7166,
619
- "step": 87
620
- },
621
- {
622
- "epoch": 0.4292682926829268,
623
- "grad_norm": 1.7627829313278198,
624
- "learning_rate": 4.977813074938128e-06,
625
- "loss": 0.5965,
626
- "step": 88
627
- },
628
- {
629
- "epoch": 0.43414634146341463,
630
- "grad_norm": 1.6656118631362915,
631
- "learning_rate": 4.977300877065347e-06,
632
- "loss": 0.5908,
633
- "step": 89
634
- },
635
- {
636
- "epoch": 0.43902439024390244,
637
- "grad_norm": 1.2669886350631714,
638
- "learning_rate": 4.976782861233053e-06,
639
- "loss": 0.4368,
640
- "step": 90
641
- },
642
- {
643
- "epoch": 0.44390243902439025,
644
- "grad_norm": 1.2918105125427246,
645
- "learning_rate": 4.976259028657812e-06,
646
- "loss": 0.3848,
647
- "step": 91
648
- },
649
- {
650
- "epoch": 0.44878048780487806,
651
- "grad_norm": 1.3664082288742065,
652
- "learning_rate": 4.975729380569845e-06,
653
- "loss": 0.5376,
654
- "step": 92
655
- },
656
- {
657
- "epoch": 0.45365853658536587,
658
- "grad_norm": 1.9810562133789062,
659
- "learning_rate": 4.975193918213035e-06,
660
- "loss": 0.4396,
661
- "step": 93
662
- },
663
- {
664
- "epoch": 0.4585365853658537,
665
- "grad_norm": 1.5167036056518555,
666
- "learning_rate": 4.974652642844921e-06,
667
- "loss": 0.5573,
668
- "step": 94
669
- },
670
- {
671
- "epoch": 0.4634146341463415,
672
- "grad_norm": 3.6640360355377197,
673
- "learning_rate": 4.974105555736693e-06,
674
- "loss": 0.8419,
675
- "step": 95
676
- },
677
- {
678
- "epoch": 0.4682926829268293,
679
- "grad_norm": 2.480140209197998,
680
- "learning_rate": 4.973552658173186e-06,
681
- "loss": 0.6014,
682
- "step": 96
683
- },
684
- {
685
- "epoch": 0.47317073170731705,
686
- "grad_norm": 1.6618154048919678,
687
- "learning_rate": 4.972993951452887e-06,
688
- "loss": 0.5581,
689
- "step": 97
690
- },
691
- {
692
- "epoch": 0.47804878048780486,
693
- "grad_norm": 1.291348934173584,
694
- "learning_rate": 4.9724294368879214e-06,
695
- "loss": 0.4499,
696
- "step": 98
697
- },
698
- {
699
- "epoch": 0.48292682926829267,
700
- "grad_norm": 1.8929156064987183,
701
- "learning_rate": 4.971859115804055e-06,
702
- "loss": 0.7873,
703
- "step": 99
704
- },
705
- {
706
- "epoch": 0.4878048780487805,
707
- "grad_norm": 1.57858145236969,
708
- "learning_rate": 4.9712829895406935e-06,
709
- "loss": 0.5793,
710
- "step": 100
711
- },
712
- {
713
- "epoch": 0.4926829268292683,
714
- "grad_norm": 1.1852738857269287,
715
- "learning_rate": 4.970701059450872e-06,
716
- "loss": 0.3179,
717
- "step": 101
718
- },
719
- {
720
- "epoch": 0.4975609756097561,
721
- "grad_norm": 1.8145536184310913,
722
- "learning_rate": 4.970113326901258e-06,
723
- "loss": 0.7649,
724
- "step": 102
725
- },
726
- {
727
- "epoch": 0.5024390243902439,
728
- "grad_norm": 1.5796900987625122,
729
- "learning_rate": 4.9695197932721455e-06,
730
- "loss": 0.6736,
731
- "step": 103
732
- },
733
- {
734
- "epoch": 0.5073170731707317,
735
- "grad_norm": 1.4091283082962036,
736
- "learning_rate": 4.968920459957453e-06,
737
- "loss": 0.6019,
738
- "step": 104
739
- },
740
- {
741
- "epoch": 0.5121951219512195,
742
- "grad_norm": 2.3007757663726807,
743
- "learning_rate": 4.968315328364719e-06,
744
- "loss": 0.8106,
745
- "step": 105
746
- },
747
- {
748
- "epoch": 0.5170731707317073,
749
- "grad_norm": 2.226351499557495,
750
- "learning_rate": 4.9677043999151e-06,
751
- "loss": 0.9389,
752
- "step": 106
753
- },
754
- {
755
- "epoch": 0.5219512195121951,
756
- "grad_norm": 1.713365912437439,
757
- "learning_rate": 4.967087676043366e-06,
758
- "loss": 0.3773,
759
- "step": 107
760
- },
761
- {
762
- "epoch": 0.526829268292683,
763
- "grad_norm": 1.6297917366027832,
764
- "learning_rate": 4.966465158197897e-06,
765
- "loss": 0.6278,
766
- "step": 108
767
- },
768
- {
769
- "epoch": 0.5317073170731708,
770
- "grad_norm": 1.7754937410354614,
771
- "learning_rate": 4.965836847840681e-06,
772
- "loss": 0.5086,
773
- "step": 109
774
- },
775
- {
776
- "epoch": 0.5365853658536586,
777
- "grad_norm": 1.6794660091400146,
778
- "learning_rate": 4.96520274644731e-06,
779
- "loss": 0.6123,
780
- "step": 110
781
- },
782
- {
783
- "epoch": 0.5414634146341464,
784
- "grad_norm": 1.934241533279419,
785
- "learning_rate": 4.964562855506976e-06,
786
- "loss": 0.5779,
787
- "step": 111
788
- },
789
- {
790
- "epoch": 0.5463414634146342,
791
- "grad_norm": 1.2584961652755737,
792
- "learning_rate": 4.963917176522466e-06,
793
- "loss": 0.4115,
794
- "step": 112
795
- },
796
- {
797
- "epoch": 0.551219512195122,
798
- "grad_norm": 2.1711580753326416,
799
- "learning_rate": 4.963265711010164e-06,
800
- "loss": 0.7418,
801
- "step": 113
802
- },
803
- {
804
- "epoch": 0.5560975609756098,
805
- "grad_norm": 1.650855541229248,
806
- "learning_rate": 4.9626084605000395e-06,
807
- "loss": 0.5725,
808
- "step": 114
809
- },
810
- {
811
- "epoch": 0.5609756097560976,
812
- "grad_norm": 1.9013773202896118,
813
- "learning_rate": 4.961945426535652e-06,
814
- "loss": 0.4464,
815
- "step": 115
816
- },
817
- {
818
- "epoch": 0.5658536585365853,
819
- "grad_norm": 1.4872888326644897,
820
- "learning_rate": 4.961276610674141e-06,
821
- "loss": 0.6751,
822
- "step": 116
823
- },
824
- {
825
- "epoch": 0.5707317073170731,
826
- "grad_norm": 1.5318901538848877,
827
- "learning_rate": 4.960602014486225e-06,
828
- "loss": 0.7975,
829
- "step": 117
830
- },
831
- {
832
- "epoch": 0.5756097560975609,
833
- "grad_norm": 1.2589044570922852,
834
- "learning_rate": 4.959921639556199e-06,
835
- "loss": 0.4939,
836
- "step": 118
837
- },
838
- {
839
- "epoch": 0.5804878048780487,
840
- "grad_norm": 1.881285548210144,
841
- "learning_rate": 4.959235487481928e-06,
842
- "loss": 0.8222,
843
- "step": 119
844
- },
845
- {
846
- "epoch": 0.5853658536585366,
847
- "grad_norm": 1.576146125793457,
848
- "learning_rate": 4.958543559874846e-06,
849
- "loss": 0.4076,
850
- "step": 120
851
- },
852
- {
853
- "epoch": 0.5902439024390244,
854
- "grad_norm": 2.0078847408294678,
855
- "learning_rate": 4.9578458583599495e-06,
856
- "loss": 0.562,
857
- "step": 121
858
- },
859
- {
860
- "epoch": 0.5951219512195122,
861
- "grad_norm": 1.4955941438674927,
862
- "learning_rate": 4.957142384575795e-06,
863
- "loss": 0.5184,
864
- "step": 122
865
- },
866
- {
867
- "epoch": 0.6,
868
- "grad_norm": 1.9556761980056763,
869
- "learning_rate": 4.956433140174498e-06,
870
- "loss": 0.7036,
871
- "step": 123
872
- },
873
- {
874
- "epoch": 0.6048780487804878,
875
- "grad_norm": 1.9274554252624512,
876
- "learning_rate": 4.9557181268217225e-06,
877
- "loss": 0.7137,
878
- "step": 124
879
- },
880
- {
881
- "epoch": 0.6097560975609756,
882
- "grad_norm": 1.672255277633667,
883
- "learning_rate": 4.954997346196683e-06,
884
- "loss": 1.0138,
885
- "step": 125
886
- },
887
- {
888
- "epoch": 0.6146341463414634,
889
- "grad_norm": 1.6470623016357422,
890
- "learning_rate": 4.954270799992138e-06,
891
- "loss": 0.6059,
892
- "step": 126
893
- },
894
- {
895
- "epoch": 0.6195121951219512,
896
- "grad_norm": 2.0698301792144775,
897
- "learning_rate": 4.953538489914387e-06,
898
- "loss": 0.6224,
899
- "step": 127
900
- },
901
- {
902
- "epoch": 0.624390243902439,
903
- "grad_norm": 1.7772653102874756,
904
- "learning_rate": 4.9528004176832654e-06,
905
- "loss": 0.4795,
906
- "step": 128
907
- },
908
- {
909
- "epoch": 0.6292682926829268,
910
- "grad_norm": 2.276451587677002,
911
- "learning_rate": 4.952056585032142e-06,
912
- "loss": 0.8591,
913
- "step": 129
914
- },
915
- {
916
- "epoch": 0.6341463414634146,
917
- "grad_norm": 2.058436870574951,
918
- "learning_rate": 4.951306993707913e-06,
919
- "loss": 0.6678,
920
- "step": 130
921
- },
922
- {
923
- "epoch": 0.6390243902439025,
924
- "grad_norm": 1.8003332614898682,
925
- "learning_rate": 4.950551645470998e-06,
926
- "loss": 0.617,
927
- "step": 131
928
- },
929
- {
930
- "epoch": 0.6439024390243903,
931
- "grad_norm": 2.0872535705566406,
932
- "learning_rate": 4.9497905420953406e-06,
933
- "loss": 0.611,
934
- "step": 132
935
- },
936
- {
937
- "epoch": 0.6487804878048781,
938
- "grad_norm": 2.1815896034240723,
939
- "learning_rate": 4.949023685368395e-06,
940
- "loss": 0.6445,
941
- "step": 133
942
- },
943
- {
944
- "epoch": 0.6536585365853659,
945
- "grad_norm": 1.7983999252319336,
946
- "learning_rate": 4.948251077091131e-06,
947
- "loss": 0.8733,
948
- "step": 134
949
- },
950
- {
951
- "epoch": 0.6585365853658537,
952
- "grad_norm": 1.6845791339874268,
953
- "learning_rate": 4.947472719078025e-06,
954
- "loss": 0.6613,
955
- "step": 135
956
- },
957
- {
958
- "epoch": 0.6634146341463415,
959
- "grad_norm": 1.6869384050369263,
960
- "learning_rate": 4.9466886131570565e-06,
961
- "loss": 0.7319,
962
- "step": 136
963
- },
964
- {
965
- "epoch": 0.6682926829268293,
966
- "grad_norm": 1.9151450395584106,
967
- "learning_rate": 4.945898761169704e-06,
968
- "loss": 0.8795,
969
- "step": 137
970
- },
971
- {
972
- "epoch": 0.6731707317073171,
973
- "grad_norm": 1.3423423767089844,
974
- "learning_rate": 4.945103164970941e-06,
975
- "loss": 0.4385,
976
- "step": 138
977
- },
978
- {
979
- "epoch": 0.6780487804878049,
980
- "grad_norm": 1.4854035377502441,
981
- "learning_rate": 4.9443018264292304e-06,
982
- "loss": 0.5341,
983
- "step": 139
984
- },
985
- {
986
- "epoch": 0.6829268292682927,
987
- "grad_norm": 1.805953025817871,
988
- "learning_rate": 4.9434947474265225e-06,
989
- "loss": 0.6463,
990
- "step": 140
991
- },
992
- {
993
- "epoch": 0.6878048780487804,
994
- "grad_norm": 1.7254730463027954,
995
- "learning_rate": 4.942681929858249e-06,
996
- "loss": 0.8897,
997
- "step": 141
998
- },
999
- {
1000
- "epoch": 0.6926829268292682,
1001
- "grad_norm": 1.9024320840835571,
1002
- "learning_rate": 4.941863375633315e-06,
1003
- "loss": 0.7033,
1004
- "step": 142
1005
- },
1006
- {
1007
- "epoch": 0.697560975609756,
1008
- "grad_norm": 1.4351361989974976,
1009
- "learning_rate": 4.9410390866741056e-06,
1010
- "loss": 0.527,
1011
- "step": 143
1012
- },
1013
- {
1014
- "epoch": 0.7024390243902439,
1015
- "grad_norm": 1.4365131855010986,
1016
- "learning_rate": 4.9402090649164655e-06,
1017
- "loss": 0.5458,
1018
- "step": 144
1019
- },
1020
- {
1021
- "epoch": 0.7073170731707317,
1022
- "grad_norm": 2.144742965698242,
1023
- "learning_rate": 4.9393733123097085e-06,
1024
- "loss": 0.8133,
1025
- "step": 145
1026
- },
1027
- {
1028
- "epoch": 0.7121951219512195,
1029
- "grad_norm": 2.066554546356201,
1030
- "learning_rate": 4.9385318308166065e-06,
1031
- "loss": 0.7282,
1032
- "step": 146
1033
- },
1034
- {
1035
- "epoch": 0.7170731707317073,
1036
- "grad_norm": 1.5883394479751587,
1037
- "learning_rate": 4.937684622413385e-06,
1038
- "loss": 0.4144,
1039
- "step": 147
1040
- },
1041
- {
1042
- "epoch": 0.7219512195121951,
1043
- "grad_norm": 2.329960346221924,
1044
- "learning_rate": 4.9368316890897185e-06,
1045
- "loss": 0.8182,
1046
- "step": 148
1047
- },
1048
- {
1049
- "epoch": 0.7268292682926829,
1050
- "grad_norm": 1.5622303485870361,
1051
- "learning_rate": 4.9359730328487264e-06,
1052
- "loss": 0.412,
1053
- "step": 149
1054
- },
1055
- {
1056
- "epoch": 0.7317073170731707,
1057
- "grad_norm": 1.3020845651626587,
1058
- "learning_rate": 4.935108655706972e-06,
1059
- "loss": 0.5607,
1060
- "step": 150
1061
- },
1062
- {
1063
- "epoch": 0.7365853658536585,
1064
- "grad_norm": 2.0023865699768066,
1065
- "learning_rate": 4.934238559694448e-06,
1066
- "loss": 0.9273,
1067
- "step": 151
1068
- },
1069
- {
1070
- "epoch": 0.7414634146341463,
1071
- "grad_norm": 1.6495121717453003,
1072
- "learning_rate": 4.9333627468545845e-06,
1073
- "loss": 0.4775,
1074
- "step": 152
1075
- },
1076
- {
1077
- "epoch": 0.7463414634146341,
1078
- "grad_norm": 1.4878952503204346,
1079
- "learning_rate": 4.932481219244231e-06,
1080
- "loss": 0.562,
1081
- "step": 153
1082
- },
1083
- {
1084
- "epoch": 0.751219512195122,
1085
- "grad_norm": 2.352097511291504,
1086
- "learning_rate": 4.931593978933666e-06,
1087
- "loss": 0.6151,
1088
- "step": 154
1089
- },
1090
- {
1091
- "epoch": 0.7560975609756098,
1092
- "grad_norm": 1.3260765075683594,
1093
- "learning_rate": 4.930701028006577e-06,
1094
- "loss": 0.6572,
1095
- "step": 155
1096
- },
1097
- {
1098
- "epoch": 0.7609756097560976,
1099
- "grad_norm": 1.3987398147583008,
1100
- "learning_rate": 4.929802368560066e-06,
1101
- "loss": 0.5205,
1102
- "step": 156
1103
- },
1104
- {
1105
- "epoch": 0.7658536585365854,
1106
- "grad_norm": 2.636143922805786,
1107
- "learning_rate": 4.928898002704642e-06,
1108
- "loss": 0.7562,
1109
- "step": 157
1110
- },
1111
- {
1112
- "epoch": 0.7707317073170732,
1113
- "grad_norm": 2.312101125717163,
1114
- "learning_rate": 4.927987932564215e-06,
1115
- "loss": 0.6415,
1116
- "step": 158
1117
- },
1118
- {
1119
- "epoch": 0.775609756097561,
1120
- "grad_norm": 1.6384342908859253,
1121
- "learning_rate": 4.927072160276092e-06,
1122
- "loss": 0.6148,
1123
- "step": 159
1124
- },
1125
- {
1126
- "epoch": 0.7804878048780488,
1127
- "grad_norm": 1.3910821676254272,
1128
- "learning_rate": 4.926150687990969e-06,
1129
- "loss": 0.3866,
1130
- "step": 160
1131
- },
1132
- {
1133
- "epoch": 0.7853658536585366,
1134
- "grad_norm": 1.7929809093475342,
1135
- "learning_rate": 4.925223517872934e-06,
1136
- "loss": 0.7282,
1137
- "step": 161
1138
- },
1139
- {
1140
- "epoch": 0.7902439024390244,
1141
- "grad_norm": 2.1226377487182617,
1142
- "learning_rate": 4.9242906520994484e-06,
1143
- "loss": 0.8234,
1144
- "step": 162
1145
- },
1146
- {
1147
- "epoch": 0.7951219512195122,
1148
- "grad_norm": 1.6692653894424438,
1149
- "learning_rate": 4.923352092861358e-06,
1150
- "loss": 0.7111,
1151
- "step": 163
1152
- },
1153
- {
1154
- "epoch": 0.8,
1155
- "grad_norm": 1.7468419075012207,
1156
- "learning_rate": 4.922407842362875e-06,
1157
- "loss": 0.4963,
1158
- "step": 164
1159
- },
1160
- {
1161
- "epoch": 0.8048780487804879,
1162
- "grad_norm": 1.5026869773864746,
1163
- "learning_rate": 4.921457902821578e-06,
1164
- "loss": 0.7182,
1165
- "step": 165
1166
- },
1167
- {
1168
- "epoch": 0.8097560975609757,
1169
- "grad_norm": 1.565578818321228,
1170
- "learning_rate": 4.920502276468408e-06,
1171
- "loss": 0.6331,
1172
- "step": 166
1173
- },
1174
- {
1175
- "epoch": 0.8146341463414634,
1176
- "grad_norm": 1.5285255908966064,
1177
- "learning_rate": 4.9195409655476605e-06,
1178
- "loss": 0.5216,
1179
- "step": 167
1180
- },
1181
- {
1182
- "epoch": 0.8195121951219512,
1183
- "grad_norm": 2.1599016189575195,
1184
- "learning_rate": 4.918573972316982e-06,
1185
- "loss": 0.8197,
1186
- "step": 168
1187
- },
1188
- {
1189
- "epoch": 0.824390243902439,
1190
- "grad_norm": 1.9766514301300049,
1191
- "learning_rate": 4.917601299047361e-06,
1192
- "loss": 0.6364,
1193
- "step": 169
1194
- },
1195
- {
1196
- "epoch": 0.8292682926829268,
1197
- "grad_norm": 1.6226218938827515,
1198
- "learning_rate": 4.916622948023129e-06,
1199
- "loss": 0.5688,
1200
- "step": 170
1201
- },
1202
- {
1203
- "epoch": 0.8341463414634146,
1204
- "grad_norm": 2.169351100921631,
1205
- "learning_rate": 4.915638921541952e-06,
1206
- "loss": 0.5144,
1207
- "step": 171
1208
- },
1209
- {
1210
- "epoch": 0.8390243902439024,
1211
- "grad_norm": 2.0374093055725098,
1212
- "learning_rate": 4.914649221914822e-06,
1213
- "loss": 0.6684,
1214
- "step": 172
1215
- },
1216
- {
1217
- "epoch": 0.8439024390243902,
1218
- "grad_norm": 1.8811930418014526,
1219
- "learning_rate": 4.913653851466057e-06,
1220
- "loss": 0.4537,
1221
- "step": 173
1222
- },
1223
- {
1224
- "epoch": 0.848780487804878,
1225
- "grad_norm": 1.7003443241119385,
1226
- "learning_rate": 4.912652812533291e-06,
1227
- "loss": 0.5926,
1228
- "step": 174
1229
- },
1230
- {
1231
- "epoch": 0.8536585365853658,
1232
- "grad_norm": 1.6899495124816895,
1233
- "learning_rate": 4.911646107467472e-06,
1234
- "loss": 0.6797,
1235
- "step": 175
1236
- },
1237
- {
1238
- "epoch": 0.8585365853658536,
1239
- "grad_norm": 1.5597474575042725,
1240
- "learning_rate": 4.9106337386328524e-06,
1241
- "loss": 0.7363,
1242
- "step": 176
1243
- },
1244
- {
1245
- "epoch": 0.8634146341463415,
1246
- "grad_norm": 1.7820264101028442,
1247
- "learning_rate": 4.909615708406991e-06,
1248
- "loss": 0.7277,
1249
- "step": 177
1250
- },
1251
- {
1252
- "epoch": 0.8682926829268293,
1253
- "grad_norm": 1.5444871187210083,
1254
- "learning_rate": 4.908592019180738e-06,
1255
- "loss": 0.4991,
1256
- "step": 178
1257
- },
1258
- {
1259
- "epoch": 0.8731707317073171,
1260
- "grad_norm": 1.1890966892242432,
1261
- "learning_rate": 4.907562673358234e-06,
1262
- "loss": 0.4345,
1263
- "step": 179
1264
- },
1265
- {
1266
- "epoch": 0.8780487804878049,
1267
- "grad_norm": 1.6008920669555664,
1268
- "learning_rate": 4.906527673356907e-06,
1269
- "loss": 0.506,
1270
- "step": 180
1271
- },
1272
- {
1273
- "epoch": 0.8829268292682927,
1274
- "grad_norm": 1.3028374910354614,
1275
- "learning_rate": 4.905487021607462e-06,
1276
- "loss": 0.4031,
1277
- "step": 181
1278
- },
1279
- {
1280
- "epoch": 0.8878048780487805,
1281
- "grad_norm": 2.086660146713257,
1282
- "learning_rate": 4.904440720553876e-06,
1283
- "loss": 0.6167,
1284
- "step": 182
1285
- },
1286
- {
1287
- "epoch": 0.8926829268292683,
1288
- "grad_norm": 1.5964947938919067,
1289
- "learning_rate": 4.903388772653396e-06,
1290
- "loss": 0.5923,
1291
- "step": 183
1292
- },
1293
- {
1294
- "epoch": 0.8975609756097561,
1295
- "grad_norm": 1.7292804718017578,
1296
- "learning_rate": 4.902331180376529e-06,
1297
- "loss": 0.6047,
1298
- "step": 184
1299
- },
1300
- {
1301
- "epoch": 0.9024390243902439,
1302
- "grad_norm": 1.6994556188583374,
1303
- "learning_rate": 4.901267946207038e-06,
1304
- "loss": 0.7615,
1305
- "step": 185
1306
- },
1307
- {
1308
- "epoch": 0.9073170731707317,
1309
- "grad_norm": 1.9247877597808838,
1310
- "learning_rate": 4.900199072641937e-06,
1311
- "loss": 0.5834,
1312
- "step": 186
1313
- },
1314
- {
1315
- "epoch": 0.9121951219512195,
1316
- "grad_norm": 1.4916514158248901,
1317
- "learning_rate": 4.899124562191484e-06,
1318
- "loss": 0.4737,
1319
- "step": 187
1320
- },
1321
- {
1322
- "epoch": 0.9170731707317074,
1323
- "grad_norm": 1.9706366062164307,
1324
- "learning_rate": 4.8980444173791735e-06,
1325
- "loss": 0.4418,
1326
- "step": 188
1327
- },
1328
- {
1329
- "epoch": 0.9219512195121952,
1330
- "grad_norm": 2.295691728591919,
1331
- "learning_rate": 4.896958640741735e-06,
1332
- "loss": 0.7035,
1333
- "step": 189
1334
- },
1335
- {
1336
- "epoch": 0.926829268292683,
1337
- "grad_norm": 2.459785223007202,
1338
- "learning_rate": 4.895867234829121e-06,
1339
- "loss": 0.7988,
1340
- "step": 190
1341
- },
1342
- {
1343
- "epoch": 0.9317073170731708,
1344
- "grad_norm": 1.6266491413116455,
1345
- "learning_rate": 4.894770202204509e-06,
1346
- "loss": 0.5665,
1347
- "step": 191
1348
- },
1349
- {
1350
- "epoch": 0.9365853658536586,
1351
- "grad_norm": 1.549321174621582,
1352
- "learning_rate": 4.893667545444285e-06,
1353
- "loss": 0.5774,
1354
- "step": 192
1355
- },
1356
- {
1357
- "epoch": 0.9414634146341463,
1358
- "grad_norm": 2.014610767364502,
1359
- "learning_rate": 4.8925592671380495e-06,
1360
- "loss": 0.5668,
1361
- "step": 193
1362
- },
1363
- {
1364
- "epoch": 0.9463414634146341,
1365
- "grad_norm": 1.3015650510787964,
1366
- "learning_rate": 4.891445369888601e-06,
1367
- "loss": 0.427,
1368
- "step": 194
1369
- },
1370
- {
1371
- "epoch": 0.9512195121951219,
1372
- "grad_norm": 1.6933586597442627,
1373
- "learning_rate": 4.890325856311936e-06,
1374
- "loss": 0.7354,
1375
- "step": 195
1376
- },
1377
- {
1378
- "epoch": 0.9560975609756097,
1379
- "grad_norm": 2.1092705726623535,
1380
- "learning_rate": 4.889200729037241e-06,
1381
- "loss": 0.7039,
1382
- "step": 196
1383
- },
1384
- {
1385
- "epoch": 0.9609756097560975,
1386
- "grad_norm": 1.625111699104309,
1387
- "learning_rate": 4.888069990706884e-06,
1388
- "loss": 0.5369,
1389
- "step": 197
1390
- },
1391
- {
1392
- "epoch": 0.9658536585365853,
1393
- "grad_norm": 1.4547514915466309,
1394
- "learning_rate": 4.886933643976414e-06,
1395
- "loss": 0.3645,
1396
- "step": 198
1397
- },
1398
- {
1399
- "epoch": 0.9707317073170731,
1400
- "grad_norm": 1.832529902458191,
1401
- "learning_rate": 4.885791691514548e-06,
1402
- "loss": 0.422,
1403
- "step": 199
1404
- },
1405
- {
1406
- "epoch": 0.975609756097561,
1407
- "grad_norm": 1.533402442932129,
1408
- "learning_rate": 4.884644136003172e-06,
1409
- "loss": 0.4698,
1410
- "step": 200
1411
- },
1412
- {
1413
- "epoch": 0.9804878048780488,
1414
- "grad_norm": 1.9563912153244019,
1415
- "learning_rate": 4.883490980137327e-06,
1416
- "loss": 1.1273,
1417
- "step": 201
1418
- },
1419
- {
1420
- "epoch": 0.9853658536585366,
1421
- "grad_norm": 1.7044615745544434,
1422
- "learning_rate": 4.882332226625208e-06,
1423
- "loss": 0.6056,
1424
- "step": 202
1425
- },
1426
- {
1427
- "epoch": 0.9902439024390244,
1428
- "grad_norm": 1.6405285596847534,
1429
- "learning_rate": 4.881167878188158e-06,
1430
- "loss": 0.7204,
1431
- "step": 203
1432
- },
1433
- {
1434
- "epoch": 0.9951219512195122,
1435
- "grad_norm": 1.5838991403579712,
1436
- "learning_rate": 4.8799979375606565e-06,
1437
- "loss": 0.553,
1438
- "step": 204
1439
- },
1440
- {
1441
- "epoch": 1.0,
1442
- "grad_norm": 1.6604121923446655,
1443
- "learning_rate": 4.878822407490319e-06,
1444
- "loss": 0.4694,
1445
- "step": 205
1446
- }
1447
- ],
1448
- "logging_steps": 1,
1449
- "max_steps": 2050,
1450
- "num_input_tokens_seen": 0,
1451
- "num_train_epochs": 10,
1452
- "save_steps": 208,
1453
- "stateful_callbacks": {
1454
- "TrainerControl": {
1455
- "args": {
1456
- "should_epoch_stop": false,
1457
- "should_evaluate": false,
1458
- "should_log": false,
1459
- "should_save": true,
1460
- "should_training_stop": false
1461
- },
1462
- "attributes": {}
1463
- }
1464
- },
1465
- "total_flos": 6.049419025291674e+16,
1466
- "train_batch_size": 1,
1467
- "trial_name": null,
1468
- "trial_params": null
1469
- }
 
qwen2_5_7b_instruct/limo/checkpoint-205/vocab.json DELETED
The diff for this file is too large to render. See raw diff
 
qwen2_5_7b_instruct/limo/checkpoint-410/added_tokens.json DELETED
@@ -1,24 +0,0 @@
- {
- "</tool_call>": 151658,
- "<tool_call>": 151657,
- "<|box_end|>": 151649,
- "<|box_start|>": 151648,
- "<|endoftext|>": 151643,
- "<|file_sep|>": 151664,
- "<|fim_middle|>": 151660,
- "<|fim_pad|>": 151662,
- "<|fim_prefix|>": 151659,
- "<|fim_suffix|>": 151661,
- "<|im_end|>": 151645,
- "<|im_start|>": 151644,
- "<|image_pad|>": 151655,
- "<|object_ref_end|>": 151647,
- "<|object_ref_start|>": 151646,
- "<|quad_end|>": 151651,
- "<|quad_start|>": 151650,
- "<|repo_name|>": 151663,
- "<|video_pad|>": 151656,
- "<|vision_end|>": 151653,
- "<|vision_pad|>": 151654,
- "<|vision_start|>": 151652
- }
 
qwen2_5_7b_instruct/limo/checkpoint-410/chat_template.jinja DELETED
@@ -1,54 +0,0 @@
- {%- if tools %}
- {{- '<|im_start|>system\n' }}
- {%- if messages[0]['role'] == 'system' %}
- {{- messages[0]['content'] }}
- {%- else %}
- {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
- {%- endif %}
- {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
- {%- for tool in tools %}
- {{- "\n" }}
- {{- tool | tojson }}
- {%- endfor %}
- {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
- {%- else %}
- {%- if messages[0]['role'] == 'system' %}
- {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
- {%- else %}
- {{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }}
- {%- endif %}
- {%- endif %}
- {%- for message in messages %}
- {%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
- {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
- {%- elif message.role == "assistant" %}
- {{- '<|im_start|>' + message.role }}
- {%- if message.content %}
- {{- '\n' + message.content }}
- {%- endif %}
- {%- for tool_call in message.tool_calls %}
- {%- if tool_call.function is defined %}
- {%- set tool_call = tool_call.function %}
- {%- endif %}
- {{- '\n<tool_call>\n{"name": "' }}
- {{- tool_call.name }}
- {{- '", "arguments": ' }}
- {{- tool_call.arguments | tojson }}
- {{- '}\n</tool_call>' }}
- {%- endfor %}
- {{- '<|im_end|>\n' }}
- {%- elif message.role == "tool" %}
- {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
- {{- '<|im_start|>user' }}
- {%- endif %}
- {{- '\n<tool_response>\n' }}
- {{- message.content }}
- {{- '\n</tool_response>' }}
- {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
- {{- '<|im_end|>\n' }}
- {%- endif %}
- {%- endif %}
- {%- endfor %}
- {%- if add_generation_prompt %}
- {{- '<|im_start|>assistant\n' }}
- {%- endif %}
 
qwen2_5_7b_instruct/limo/checkpoint-410/config.json DELETED
@@ -1,58 +0,0 @@
- {
- "architectures": [
- "Qwen2ForCausalLM"
- ],
- "attention_dropout": 0.0,
- "bos_token_id": 151643,
- "eos_token_id": 151645,
- "hidden_act": "silu",
- "hidden_size": 3584,
- "initializer_range": 0.02,
- "intermediate_size": 18944,
- "layer_types": [
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention"
- ],
- "max_position_embeddings": 32768,
- "max_window_layers": 28,
- "model_type": "qwen2",
- "num_attention_heads": 28,
- "num_hidden_layers": 28,
- "num_key_value_heads": 4,
- "rms_norm_eps": 1e-06,
- "rope_scaling": null,
- "rope_theta": 1000000.0,
- "sliding_window": null,
- "tie_word_embeddings": false,
- "torch_dtype": "float32",
- "transformers_version": "4.55.0",
- "use_cache": false,
- "use_sliding_window": false,
- "vocab_size": 152064
- }
 
qwen2_5_7b_instruct/limo/checkpoint-410/generation_config.json DELETED
@@ -1,7 +0,0 @@
- {
- "_from_model_config": true,
- "bos_token_id": 151643,
- "eos_token_id": 151645,
- "transformers_version": "4.55.0",
- "use_cache": false
- }
 
qwen2_5_7b_instruct/limo/checkpoint-410/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
qwen2_5_7b_instruct/limo/checkpoint-410/model-00001-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:d82701ff300ebad903415b176cfa777e8714b24f3ecb003b1ed41d703ac54631
- size 4976687216
 
qwen2_5_7b_instruct/limo/checkpoint-410/model-00002-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:629a3c5e12ada93dbd0cb38bb8c182ef74ada5c74aa9a23f311c0e797f0cf077
- size 4778622352
 
qwen2_5_7b_instruct/limo/checkpoint-410/model-00003-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:3503850784b9e095887624738d4bcf3eaf85c4ffa6a53fa32b1459cfb8862a0a
- size 4932743960
 
qwen2_5_7b_instruct/limo/checkpoint-410/model-00004-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:c6e39e5ff4440720d3fb16192d6d955ec17611b78a6098ec03364674b6f27eda
- size 4932743992
 
qwen2_5_7b_instruct/limo/checkpoint-410/model-00005-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:cb1a973da6a002aea3c7ec28073ea9e65f8368291aad4514baa4db323c3c3a25
- size 4998852296
 
qwen2_5_7b_instruct/limo/checkpoint-410/model-00006-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:6c91bf3f8eb74b488d22e86b10b1e45955db12071ec0e7c4ba8234180a03b26f
- size 3662865184
 
qwen2_5_7b_instruct/limo/checkpoint-410/model-00007-of-00007.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:e1bc69adefa7f4de04da5c4e75e123a8fedb8bf1cadc60d995f8524ecc925b5d
- size 2179989632
 
qwen2_5_7b_instruct/limo/checkpoint-410/model.safetensors.index.json DELETED
@@ -1,347 +0,0 @@
- {
- "metadata": {
- "total_parameters": 1903904128,
- "total_size": 30462466048
- },
- "weight_map": {
- "lm_head.weight": "model-00007-of-00007.safetensors",
- "model.embed_tokens.weight": "model-00001-of-00007.safetensors",
- "model.layers.0.input_layernorm.weight": "model-00001-of-00007.safetensors",
- "model.layers.0.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.0.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
- "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
- "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
- "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
- "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.1.input_layernorm.weight": "model-00001-of-00007.safetensors",
- "model.layers.1.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.1.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
- "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
- "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
- "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
- "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.10.input_layernorm.weight": "model-00003-of-00007.safetensors",
- "model.layers.10.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.10.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
- "model.layers.10.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
- "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.10.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
- "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.10.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
- "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.11.input_layernorm.weight": "model-00003-of-00007.safetensors",
- "model.layers.11.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.11.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
- "model.layers.11.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
- "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.11.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
- "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.11.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
- "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.12.input_layernorm.weight": "model-00003-of-00007.safetensors",
- "model.layers.12.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.12.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
- "model.layers.12.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
- "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.12.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
- "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.12.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
- "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.13.input_layernorm.weight": "model-00004-of-00007.safetensors",
- "model.layers.13.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.13.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
- "model.layers.13.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
- "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.13.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
- "model.layers.13.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.13.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
- "model.layers.13.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
- "model.layers.14.input_layernorm.weight": "model-00004-of-00007.safetensors",
- "model.layers.14.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.14.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
- "model.layers.14.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.14.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.14.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.15.input_layernorm.weight": "model-00004-of-00007.safetensors",
- "model.layers.15.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.15.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
- "model.layers.15.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.15.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.15.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.16.input_layernorm.weight": "model-00004-of-00007.safetensors",
- "model.layers.16.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.16.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
- "model.layers.16.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.16.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.16.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.16.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.16.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.17.input_layernorm.weight": "model-00004-of-00007.safetensors",
- "model.layers.17.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.17.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.17.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.17.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
- "model.layers.17.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.17.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.17.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.18.input_layernorm.weight": "model-00005-of-00007.safetensors",
- "model.layers.18.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.18.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.18.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
- "model.layers.18.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.18.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.18.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.18.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.18.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.18.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
- "model.layers.18.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
- "model.layers.19.input_layernorm.weight": "model-00005-of-00007.safetensors",
- "model.layers.19.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.19.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.19.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
- "model.layers.19.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.19.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.19.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.19.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.19.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.19.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.19.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.2.input_layernorm.weight": "model-00001-of-00007.safetensors",
- "model.layers.2.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.2.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
- "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
- "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
- "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
- "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
- "model.layers.20.input_layernorm.weight": "model-00005-of-00007.safetensors",
- "model.layers.20.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.20.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.20.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.20.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
- "model.layers.20.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.20.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.20.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.20.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.20.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.20.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.20.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.21.input_layernorm.weight": "model-00005-of-00007.safetensors",
- "model.layers.21.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.21.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.21.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.21.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
- "model.layers.21.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.21.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.21.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.21.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.21.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.21.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.21.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.22.input_layernorm.weight": "model-00005-of-00007.safetensors",
- "model.layers.22.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.22.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.22.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.22.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
- "model.layers.22.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.22.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.22.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.22.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.22.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.22.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.22.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.23.input_layernorm.weight": "model-00005-of-00007.safetensors",
- "model.layers.23.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.23.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.23.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.23.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
- "model.layers.23.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.23.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.23.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.23.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.23.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.23.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.23.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.24.input_layernorm.weight": "model-00006-of-00007.safetensors",
- "model.layers.24.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.24.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.24.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.24.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
- "model.layers.24.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.24.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.24.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.24.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.24.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.24.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
- "model.layers.24.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
- "model.layers.25.input_layernorm.weight": "model-00006-of-00007.safetensors",
- "model.layers.25.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.25.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.25.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.25.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
- "model.layers.25.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
- "model.layers.25.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.25.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.25.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
- "model.layers.25.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.25.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
- "model.layers.25.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.26.input_layernorm.weight": "model-00006-of-00007.safetensors",
- "model.layers.26.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.26.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.26.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.26.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
- "model.layers.26.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
- "model.layers.26.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.26.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.26.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
- "model.layers.26.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.26.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
- "model.layers.26.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.27.input_layernorm.weight": "model-00006-of-00007.safetensors",
- "model.layers.27.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.27.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.27.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.27.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
- "model.layers.27.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
- "model.layers.27.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.27.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.27.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
- "model.layers.27.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.27.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
- "model.layers.27.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
- "model.layers.3.input_layernorm.weight": "model-00002-of-00007.safetensors",
- "model.layers.3.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
- "model.layers.3.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
- "model.layers.3.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
- "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
- "model.layers.3.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
- "model.layers.3.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
268
- "model.layers.3.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
269
- "model.layers.3.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
270
- "model.layers.3.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
271
- "model.layers.3.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
272
- "model.layers.3.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
273
- "model.layers.4.input_layernorm.weight": "model-00002-of-00007.safetensors",
274
- "model.layers.4.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
275
- "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
276
- "model.layers.4.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
277
- "model.layers.4.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
278
- "model.layers.4.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
279
- "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
280
- "model.layers.4.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
281
- "model.layers.4.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
282
- "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
283
- "model.layers.4.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
284
- "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
285
- "model.layers.5.input_layernorm.weight": "model-00002-of-00007.safetensors",
286
- "model.layers.5.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
287
- "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
288
- "model.layers.5.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
289
- "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
290
- "model.layers.5.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
291
- "model.layers.5.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
292
- "model.layers.5.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
293
- "model.layers.5.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
294
- "model.layers.5.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
295
- "model.layers.5.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
296
- "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
297
- "model.layers.6.input_layernorm.weight": "model-00002-of-00007.safetensors",
298
- "model.layers.6.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
299
- "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
300
- "model.layers.6.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
301
- "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
302
- "model.layers.6.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
303
- "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
304
- "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
305
- "model.layers.6.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
306
- "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
307
- "model.layers.6.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
308
- "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
309
- "model.layers.7.input_layernorm.weight": "model-00002-of-00007.safetensors",
310
- "model.layers.7.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
311
- "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
312
- "model.layers.7.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
313
- "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
314
- "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
315
- "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
316
- "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
317
- "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
318
- "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
319
- "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
320
- "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
321
- "model.layers.8.input_layernorm.weight": "model-00003-of-00007.safetensors",
322
- "model.layers.8.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
323
- "model.layers.8.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
324
- "model.layers.8.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
325
- "model.layers.8.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
326
- "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
327
- "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
328
- "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
329
- "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
330
- "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
331
- "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
332
- "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
333
- "model.layers.9.input_layernorm.weight": "model-00003-of-00007.safetensors",
334
- "model.layers.9.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
335
- "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
336
- "model.layers.9.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
337
- "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
338
- "model.layers.9.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
339
- "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
340
- "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
341
- "model.layers.9.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
342
- "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
343
- "model.layers.9.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
344
- "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
345
- "model.norm.weight": "model-00006-of-00007.safetensors"
346
- }
347
- }
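The index above is the tail of a standard safetensors shard index: its weight_map assigns each parameter name to the shard file that stores it, and a single layer can straddle shards (layer 24's attention projections sit in shard 5 while its MLP sits in shard 6; layer 8 is split the same way). A minimal sketch for inspecting such an index with only the standard library, assuming a local copy of the deleted file (the path is hypothetical):

import json
from collections import defaultdict

# Hypothetical local copy of the deleted index file.
with open("model.safetensors.index.json") as f:
    index = json.load(f)

# weight_map: parameter name -> shard file, e.g.
# "model.layers.22.mlp.up_proj.weight" -> "model-00005-of-00007.safetensors".
shards = defaultdict(list)
for param, shard in index["weight_map"].items():
    shards[shard].append(param)

for shard in sorted(shards):
    print(shard, len(shards[shard]), "tensors")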
 
qwen2_5_7b_instruct/limo/checkpoint-410/rng_state_0.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:bbe0d720c4c75a6a04213fa3b64bacbe794718a53e2b56ebb67a1a795014dfad
- size 15024
 
qwen2_5_7b_instruct/limo/checkpoint-410/rng_state_1.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:72452d3138d0ca2ff89429e3294a834ae7a68e8596fc757735ca56ae52509d57
- size 15024
 
qwen2_5_7b_instruct/limo/checkpoint-410/rng_state_2.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:f36e306fb8ebcf53a167bfd6c9af74db410a269ada1e619e3e816f5269543b9d
- size 15024
 
qwen2_5_7b_instruct/limo/checkpoint-410/rng_state_3.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:bb47ce0c6f815a6f8302b0e3819b4c2315ca71dae3138d97fdceb765cdd0a039
- size 15024
 
qwen2_5_7b_instruct/limo/checkpoint-410/scheduler.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:64b87c070356d49af38984aef4437a4edcf8b6880ba0aec9af0e093c40c21ff3
- size 1064
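The rng_state_*.pth, scheduler.pt, and tokenizer.json deletions above and below are Git LFS pointer files: three lines giving the spec version, a sha256 object id, and the blob size in bytes. A minimal sketch for checking a downloaded blob against its pointer, assuming both files exist locally (both paths are hypothetical):

import hashlib

def parse_lfs_pointer(path):
    # Each pointer line is "key value", e.g. "oid sha256:bbe0d7...".
    fields = dict(line.split(" ", 1) for line in open(path).read().splitlines())
    algo, digest = fields["oid"].split(":", 1)
    return algo, digest, int(fields["size"])

# Hypothetical pointer file and resolved blob.
algo, digest, size = parse_lfs_pointer("rng_state_0.pth")
blob = open("rng_state_0.resolved.pth", "rb").read()
assert algo == "sha256"
assert len(blob) == size
assert hashlib.sha256(blob).hexdigest() == digest
print("pointer matches blob")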
 
qwen2_5_7b_instruct/limo/checkpoint-410/special_tokens_map.json DELETED
@@ -1,31 +0,0 @@
- {
- "additional_special_tokens": [
- "<|im_start|>",
- "<|im_end|>",
- "<|object_ref_start|>",
- "<|object_ref_end|>",
- "<|box_start|>",
- "<|box_end|>",
- "<|quad_start|>",
- "<|quad_end|>",
- "<|vision_start|>",
- "<|vision_end|>",
- "<|vision_pad|>",
- "<|image_pad|>",
- "<|video_pad|>"
- ],
- "eos_token": {
- "content": "<|im_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "pad_token": {
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- }
- }
 
qwen2_5_7b_instruct/limo/checkpoint-410/tokenizer.json DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
- size 11421896
 
qwen2_5_7b_instruct/limo/checkpoint-410/tokenizer_config.json DELETED
@@ -1,208 +0,0 @@
- {
- "add_bos_token": false,
- "add_prefix_space": false,
- "added_tokens_decoder": {
- "151643": {
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151644": {
- "content": "<|im_start|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151645": {
- "content": "<|im_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151646": {
- "content": "<|object_ref_start|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151647": {
- "content": "<|object_ref_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151648": {
- "content": "<|box_start|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151649": {
- "content": "<|box_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151650": {
- "content": "<|quad_start|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151651": {
- "content": "<|quad_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151652": {
- "content": "<|vision_start|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151653": {
- "content": "<|vision_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151654": {
- "content": "<|vision_pad|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151655": {
- "content": "<|image_pad|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151656": {
- "content": "<|video_pad|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151657": {
- "content": "<tool_call>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151658": {
- "content": "</tool_call>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151659": {
- "content": "<|fim_prefix|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151660": {
- "content": "<|fim_middle|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151661": {
- "content": "<|fim_suffix|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151662": {
- "content": "<|fim_pad|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151663": {
- "content": "<|repo_name|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151664": {
- "content": "<|file_sep|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- }
- },
- "additional_special_tokens": [
- "<|im_start|>",
- "<|im_end|>",
- "<|object_ref_start|>",
- "<|object_ref_end|>",
- "<|box_start|>",
- "<|box_end|>",
- "<|quad_start|>",
- "<|quad_end|>",
- "<|vision_start|>",
- "<|vision_end|>",
- "<|vision_pad|>",
- "<|image_pad|>",
- "<|video_pad|>"
- ],
- "bos_token": null,
- "clean_up_tokenization_spaces": false,
- "eos_token": "<|im_end|>",
- "errors": "replace",
- "extra_special_tokens": {},
- "model_max_length": 131072,
- "pad_token": "<|endoftext|>",
- "padding_side": "right",
- "split_special_tokens": false,
- "tokenizer_class": "Qwen2Tokenizer",
- "unk_token": null
- }
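This tokenizer_config.json pins the values a loaded tokenizer exposes: eos "<|im_end|>", pad "<|endoftext|>", right-side padding, and a 131072-token model_max_length. A minimal sketch of loading it with transformers, assuming the checkpoint directory still exists locally (it is deleted in this commit, so the path is hypothetical):

from transformers import AutoTokenizer

# Hypothetical local copy of the deleted checkpoint directory.
tok = AutoTokenizer.from_pretrained("qwen2_5_7b_instruct/limo/checkpoint-410")

print(tok.eos_token)         # "<|im_end|>"
print(tok.pad_token)         # "<|endoftext|>"
print(tok.padding_side)      # "right"
print(tok.model_max_length)  # 131072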
 
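The trainer_state.json diff below records one log_history entry per optimizer step (epoch, grad_norm, learning_rate, loss, step) for 410 steps over 2 epochs. A minimal sketch for pulling out the loss curve with only the standard library, assuming a local copy of the deleted file (the path is hypothetical):

import json

# Hypothetical local copy of the deleted trainer state.
state = json.load(open("trainer_state.json"))

entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]
print(f"{state['global_step']} steps over {state['epoch']} epochs")
print("first/last logged loss:", losses[0], losses[-1])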
qwen2_5_7b_instruct/limo/checkpoint-410/trainer_state.json DELETED
@@ -1,2904 +0,0 @@
- {
- "best_global_step": null,
- "best_metric": null,
- "best_model_checkpoint": null,
- "epoch": 2.0,
- "eval_steps": 500,
- "global_step": 410,
- "is_hyper_param_search": false,
- "is_local_process_zero": true,
- "is_world_process_zero": true,
- "log_history": [
- {
- "epoch": 0.004878048780487805,
- "grad_norm": 6.251913070678711,
- "learning_rate": 5e-06,
- "loss": 0.7742,
- "step": 1
- },
- {
- "epoch": 0.00975609756097561,
- "grad_norm": 6.38546085357666,
- "learning_rate": 4.999997064365715e-06,
- "loss": 1.0365,
- "step": 2
- },
- {
- "epoch": 0.014634146341463415,
- "grad_norm": 3.223087787628174,
- "learning_rate": 4.999988257469751e-06,
- "loss": 0.6448,
- "step": 3
- },
- {
- "epoch": 0.01951219512195122,
- "grad_norm": 2.750905752182007,
- "learning_rate": 4.999973579332793e-06,
- "loss": 0.6328,
- "step": 4
- },
- {
- "epoch": 0.024390243902439025,
- "grad_norm": 4.2081828117370605,
- "learning_rate": 4.999953029989312e-06,
- "loss": 0.6984,
- "step": 5
- },
- {
- "epoch": 0.02926829268292683,
- "grad_norm": 4.167655944824219,
- "learning_rate": 4.999926609487568e-06,
- "loss": 0.9847,
- "step": 6
- },
- {
- "epoch": 0.03414634146341464,
- "grad_norm": 4.408457279205322,
- "learning_rate": 4.9998943178896106e-06,
- "loss": 0.8203,
- "step": 7
- },
- {
- "epoch": 0.03902439024390244,
- "grad_norm": 3.1360676288604736,
- "learning_rate": 4.999856155271276e-06,
- "loss": 0.6513,
- "step": 8
- },
- {
- "epoch": 0.04390243902439024,
- "grad_norm": 3.3297390937805176,
- "learning_rate": 4.999812121722191e-06,
- "loss": 0.7803,
- "step": 9
- },
- {
- "epoch": 0.04878048780487805,
- "grad_norm": 3.5323646068573,
- "learning_rate": 4.999762217345766e-06,
- "loss": 0.7527,
- "step": 10
- },
- {
- "epoch": 0.05365853658536585,
- "grad_norm": 1.9139900207519531,
- "learning_rate": 4.999706442259205e-06,
- "loss": 0.5721,
- "step": 11
- },
- {
- "epoch": 0.05853658536585366,
- "grad_norm": 2.2962052822113037,
- "learning_rate": 4.999644796593492e-06,
- "loss": 0.6354,
- "step": 12
- },
- {
- "epoch": 0.06341463414634146,
- "grad_norm": 1.8342329263687134,
- "learning_rate": 4.999577280493407e-06,
- "loss": 0.5259,
- "step": 13
- },
- {
- "epoch": 0.06829268292682927,
- "grad_norm": 2.8183422088623047,
- "learning_rate": 4.99950389411751e-06,
- "loss": 0.8089,
- "step": 14
- },
- {
- "epoch": 0.07317073170731707,
- "grad_norm": 2.2182018756866455,
- "learning_rate": 4.999424637638148e-06,
- "loss": 0.6002,
- "step": 15
- },
- {
- "epoch": 0.07804878048780488,
- "grad_norm": 2.4346132278442383,
- "learning_rate": 4.999339511241458e-06,
- "loss": 0.6651,
- "step": 16
- },
- {
- "epoch": 0.08292682926829269,
- "grad_norm": 1.9976483583450317,
- "learning_rate": 4.9992485151273584e-06,
- "loss": 0.655,
- "step": 17
- },
- {
- "epoch": 0.08780487804878048,
- "grad_norm": 2.1938087940216064,
- "learning_rate": 4.999151649509554e-06,
- "loss": 0.5523,
- "step": 18
- },
- {
- "epoch": 0.09268292682926829,
- "grad_norm": 2.5118227005004883,
- "learning_rate": 4.9990489146155356e-06,
- "loss": 0.8695,
- "step": 19
- },
- {
- "epoch": 0.0975609756097561,
- "grad_norm": 2.111804723739624,
- "learning_rate": 4.9989403106865765e-06,
- "loss": 0.8075,
- "step": 20
- },
- {
- "epoch": 0.1024390243902439,
- "grad_norm": 2.2701148986816406,
- "learning_rate": 4.9988258379777334e-06,
- "loss": 0.6715,
- "step": 21
- },
- {
- "epoch": 0.1073170731707317,
- "grad_norm": 2.2692644596099854,
- "learning_rate": 4.998705496757846e-06,
- "loss": 0.7627,
- "step": 22
- },
- {
- "epoch": 0.11219512195121951,
- "grad_norm": 1.8187084197998047,
- "learning_rate": 4.998579287309538e-06,
- "loss": 0.9055,
- "step": 23
- },
- {
- "epoch": 0.11707317073170732,
- "grad_norm": 1.498978614807129,
- "learning_rate": 4.998447209929211e-06,
- "loss": 0.4455,
- "step": 24
- },
- {
- "epoch": 0.12195121951219512,
- "grad_norm": 2.2440743446350098,
- "learning_rate": 4.998309264927053e-06,
- "loss": 0.4843,
- "step": 25
- },
- {
- "epoch": 0.12682926829268293,
- "grad_norm": 1.8050298690795898,
- "learning_rate": 4.998165452627025e-06,
- "loss": 0.6016,
- "step": 26
- },
- {
- "epoch": 0.13170731707317074,
- "grad_norm": 1.9306098222732544,
- "learning_rate": 4.998015773366874e-06,
- "loss": 0.5469,
- "step": 27
- },
- {
- "epoch": 0.13658536585365855,
- "grad_norm": 2.152693510055542,
- "learning_rate": 4.997860227498122e-06,
- "loss": 0.5583,
- "step": 28
- },
- {
- "epoch": 0.14146341463414633,
- "grad_norm": 2.3152549266815186,
- "learning_rate": 4.99769881538607e-06,
- "loss": 0.6947,
- "step": 29
- },
- {
- "epoch": 0.14634146341463414,
- "grad_norm": 1.830079197883606,
- "learning_rate": 4.997531537409794e-06,
- "loss": 0.6863,
- "step": 30
- },
- {
- "epoch": 0.15121951219512195,
- "grad_norm": 2.574989080429077,
- "learning_rate": 4.99735839396215e-06,
- "loss": 0.6081,
- "step": 31
- },
- {
- "epoch": 0.15609756097560976,
- "grad_norm": 1.9973617792129517,
- "learning_rate": 4.9971793854497655e-06,
- "loss": 0.5624,
- "step": 32
- },
- {
- "epoch": 0.16097560975609757,
- "grad_norm": 1.7976499795913696,
- "learning_rate": 4.996994512293042e-06,
- "loss": 0.514,
- "step": 33
- },
- {
- "epoch": 0.16585365853658537,
- "grad_norm": 1.865333914756775,
- "learning_rate": 4.996803774926157e-06,
- "loss": 0.5389,
- "step": 34
- },
- {
- "epoch": 0.17073170731707318,
- "grad_norm": 2.7497925758361816,
- "learning_rate": 4.996607173797059e-06,
- "loss": 1.092,
- "step": 35
- },
- {
- "epoch": 0.17560975609756097,
- "grad_norm": 2.1576943397521973,
- "learning_rate": 4.996404709367466e-06,
- "loss": 0.6338,
- "step": 36
- },
- {
- "epoch": 0.18048780487804877,
- "grad_norm": 1.9310261011123657,
- "learning_rate": 4.996196382112868e-06,
- "loss": 0.5288,
- "step": 37
- },
- {
- "epoch": 0.18536585365853658,
- "grad_norm": 2.566770553588867,
- "learning_rate": 4.9959821925225235e-06,
- "loss": 0.7568,
- "step": 38
- },
- {
- "epoch": 0.1902439024390244,
- "grad_norm": 1.8456333875656128,
- "learning_rate": 4.995762141099456e-06,
- "loss": 0.6288,
- "step": 39
- },
- {
- "epoch": 0.1951219512195122,
- "grad_norm": 2.9497272968292236,
- "learning_rate": 4.995536228360461e-06,
- "loss": 0.8085,
- "step": 40
- },
- {
- "epoch": 0.2,
- "grad_norm": 2.113572359085083,
- "learning_rate": 4.995304454836095e-06,
- "loss": 0.7446,
- "step": 41
- },
- {
- "epoch": 0.2048780487804878,
- "grad_norm": 2.083073139190674,
- "learning_rate": 4.9950668210706795e-06,
- "loss": 0.61,
- "step": 42
- },
- {
- "epoch": 0.2097560975609756,
- "grad_norm": 1.7291383743286133,
- "learning_rate": 4.994823327622299e-06,
- "loss": 0.4961,
- "step": 43
- },
- {
- "epoch": 0.2146341463414634,
- "grad_norm": 2.4040563106536865,
- "learning_rate": 4.9945739750628e-06,
- "loss": 0.6091,
- "step": 44
- },
- {
- "epoch": 0.21951219512195122,
- "grad_norm": 1.6997170448303223,
- "learning_rate": 4.994318763977789e-06,
- "loss": 0.6408,
- "step": 45
- },
- {
- "epoch": 0.22439024390243903,
- "grad_norm": 1.5265686511993408,
- "learning_rate": 4.994057694966632e-06,
- "loss": 0.4422,
- "step": 46
- },
- {
- "epoch": 0.22926829268292684,
- "grad_norm": 1.6987519264221191,
- "learning_rate": 4.993790768642449e-06,
- "loss": 0.8084,
- "step": 47
- },
- {
- "epoch": 0.23414634146341465,
- "grad_norm": 1.9533758163452148,
- "learning_rate": 4.99351798563212e-06,
- "loss": 0.9267,
- "step": 48
- },
- {
- "epoch": 0.23902439024390243,
- "grad_norm": 2.0147147178649902,
- "learning_rate": 4.993239346576278e-06,
- "loss": 0.7258,
- "step": 49
- },
- {
- "epoch": 0.24390243902439024,
- "grad_norm": 1.7322336435317993,
- "learning_rate": 4.99295485212931e-06,
- "loss": 0.7664,
- "step": 50
- },
- {
- "epoch": 0.24878048780487805,
- "grad_norm": 1.972558856010437,
- "learning_rate": 4.992664502959351e-06,
- "loss": 0.6476,
- "step": 51
- },
- {
- "epoch": 0.25365853658536586,
- "grad_norm": 2.168142557144165,
- "learning_rate": 4.99236829974829e-06,
- "loss": 0.6274,
- "step": 52
- },
- {
- "epoch": 0.25853658536585367,
- "grad_norm": 2.1706929206848145,
- "learning_rate": 4.992066243191762e-06,
- "loss": 0.8237,
- "step": 53
- },
- {
- "epoch": 0.2634146341463415,
- "grad_norm": 1.7400177717208862,
- "learning_rate": 4.991758333999148e-06,
- "loss": 0.6377,
- "step": 54
- },
- {
- "epoch": 0.2682926829268293,
- "grad_norm": 2.02597713470459,
- "learning_rate": 4.991444572893575e-06,
- "loss": 0.5368,
- "step": 55
- },
- {
- "epoch": 0.2731707317073171,
- "grad_norm": 1.9592632055282593,
- "learning_rate": 4.991124960611916e-06,
- "loss": 0.4826,
- "step": 56
- },
- {
- "epoch": 0.2780487804878049,
- "grad_norm": 1.6813147068023682,
- "learning_rate": 4.99079949790478e-06,
- "loss": 0.5786,
- "step": 57
- },
- {
- "epoch": 0.28292682926829266,
- "grad_norm": 1.5665404796600342,
- "learning_rate": 4.99046818553652e-06,
- "loss": 0.6276,
- "step": 58
- },
- {
- "epoch": 0.28780487804878047,
- "grad_norm": 1.9345953464508057,
- "learning_rate": 4.9901310242852246e-06,
- "loss": 0.5943,
- "step": 59
- },
- {
- "epoch": 0.2926829268292683,
- "grad_norm": 1.4303550720214844,
- "learning_rate": 4.9897880149427206e-06,
- "loss": 0.5156,
- "step": 60
- },
- {
- "epoch": 0.2975609756097561,
- "grad_norm": 2.3535423278808594,
- "learning_rate": 4.989439158314566e-06,
- "loss": 0.6139,
- "step": 61
- },
- {
- "epoch": 0.3024390243902439,
- "grad_norm": 1.617553949356079,
- "learning_rate": 4.989084455220056e-06,
- "loss": 0.4543,
- "step": 62
- },
- {
- "epoch": 0.3073170731707317,
- "grad_norm": 2.2876179218292236,
- "learning_rate": 4.988723906492212e-06,
- "loss": 0.8291,
- "step": 63
- },
- {
- "epoch": 0.3121951219512195,
- "grad_norm": 1.781219720840454,
- "learning_rate": 4.988357512977785e-06,
- "loss": 0.4443,
- "step": 64
- },
- {
- "epoch": 0.3170731707317073,
- "grad_norm": 1.8858840465545654,
- "learning_rate": 4.987985275537252e-06,
- "loss": 0.4885,
- "step": 65
- },
- {
- "epoch": 0.32195121951219513,
- "grad_norm": 1.685311198234558,
- "learning_rate": 4.9876071950448185e-06,
- "loss": 0.6638,
- "step": 66
- },
- {
- "epoch": 0.32682926829268294,
- "grad_norm": 1.711588740348816,
- "learning_rate": 4.987223272388407e-06,
- "loss": 0.4916,
- "step": 67
- },
- {
- "epoch": 0.33170731707317075,
- "grad_norm": 1.9743118286132812,
- "learning_rate": 4.986833508469663e-06,
- "loss": 0.837,
- "step": 68
- },
- {
- "epoch": 0.33658536585365856,
- "grad_norm": 1.3840869665145874,
- "learning_rate": 4.98643790420395e-06,
- "loss": 0.4716,
- "step": 69
- },
- {
- "epoch": 0.34146341463414637,
- "grad_norm": 2.4423623085021973,
- "learning_rate": 4.986036460520348e-06,
- "loss": 0.7045,
- "step": 70
- },
- {
- "epoch": 0.3463414634146341,
- "grad_norm": 1.9470542669296265,
- "learning_rate": 4.98562917836165e-06,
- "loss": 0.6841,
- "step": 71
- },
- {
- "epoch": 0.35121951219512193,
- "grad_norm": 1.357690691947937,
- "learning_rate": 4.985216058684362e-06,
- "loss": 0.4278,
- "step": 72
- },
- {
- "epoch": 0.35609756097560974,
- "grad_norm": 1.6999742984771729,
- "learning_rate": 4.984797102458697e-06,
- "loss": 0.8335,
- "step": 73
- },
- {
- "epoch": 0.36097560975609755,
- "grad_norm": 2.0041823387145996,
- "learning_rate": 4.984372310668579e-06,
- "loss": 0.5316,
- "step": 74
- },
- {
- "epoch": 0.36585365853658536,
- "grad_norm": 1.8468217849731445,
- "learning_rate": 4.983941684311633e-06,
- "loss": 1.0183,
- "step": 75
- },
- {
- "epoch": 0.37073170731707317,
- "grad_norm": 1.5536184310913086,
- "learning_rate": 4.983505224399188e-06,
- "loss": 0.4932,
- "step": 76
- },
- {
- "epoch": 0.375609756097561,
- "grad_norm": 1.3191509246826172,
- "learning_rate": 4.983062931956275e-06,
- "loss": 0.5752,
- "step": 77
- },
- {
- "epoch": 0.3804878048780488,
- "grad_norm": 1.9427785873413086,
- "learning_rate": 4.9826148080216195e-06,
- "loss": 0.6428,
- "step": 78
- },
- {
- "epoch": 0.3853658536585366,
- "grad_norm": 2.217280387878418,
- "learning_rate": 4.9821608536476445e-06,
- "loss": 1.0444,
- "step": 79
- },
- {
- "epoch": 0.3902439024390244,
- "grad_norm": 1.7984737157821655,
- "learning_rate": 4.981701069900465e-06,
- "loss": 0.5578,
- "step": 80
- },
- {
- "epoch": 0.3951219512195122,
- "grad_norm": 1.859869122505188,
- "learning_rate": 4.9812354578598876e-06,
- "loss": 0.5875,
- "step": 81
- },
- {
- "epoch": 0.4,
- "grad_norm": 1.8699328899383545,
- "learning_rate": 4.980764018619405e-06,
- "loss": 0.6775,
- "step": 82
- },
- {
- "epoch": 0.40487804878048783,
- "grad_norm": 1.6898409128189087,
- "learning_rate": 4.980286753286196e-06,
- "loss": 0.6081,
- "step": 83
- },
- {
- "epoch": 0.4097560975609756,
- "grad_norm": 1.5678975582122803,
- "learning_rate": 4.97980366298112e-06,
- "loss": 0.5573,
- "step": 84
- },
- {
- "epoch": 0.4146341463414634,
- "grad_norm": 1.4087779521942139,
- "learning_rate": 4.97931474883872e-06,
- "loss": 0.6142,
- "step": 85
- },
- {
- "epoch": 0.4195121951219512,
- "grad_norm": 1.8441438674926758,
- "learning_rate": 4.978820012007213e-06,
- "loss": 0.6677,
- "step": 86
- },
- {
- "epoch": 0.424390243902439,
- "grad_norm": 1.7426705360412598,
- "learning_rate": 4.978319453648495e-06,
- "loss": 0.7166,
- "step": 87
- },
- {
- "epoch": 0.4292682926829268,
- "grad_norm": 1.7627829313278198,
- "learning_rate": 4.977813074938128e-06,
- "loss": 0.5965,
- "step": 88
- },
- {
- "epoch": 0.43414634146341463,
- "grad_norm": 1.6656118631362915,
- "learning_rate": 4.977300877065347e-06,
- "loss": 0.5908,
- "step": 89
- },
- {
- "epoch": 0.43902439024390244,
- "grad_norm": 1.2669886350631714,
- "learning_rate": 4.976782861233053e-06,
- "loss": 0.4368,
- "step": 90
- },
- {
- "epoch": 0.44390243902439025,
- "grad_norm": 1.2918105125427246,
- "learning_rate": 4.976259028657812e-06,
- "loss": 0.3848,
- "step": 91
- },
- {
- "epoch": 0.44878048780487806,
- "grad_norm": 1.3664082288742065,
- "learning_rate": 4.975729380569845e-06,
- "loss": 0.5376,
- "step": 92
- },
- {
- "epoch": 0.45365853658536587,
- "grad_norm": 1.9810562133789062,
- "learning_rate": 4.975193918213035e-06,
- "loss": 0.4396,
- "step": 93
- },
- {
- "epoch": 0.4585365853658537,
- "grad_norm": 1.5167036056518555,
- "learning_rate": 4.974652642844921e-06,
- "loss": 0.5573,
- "step": 94
- },
- {
- "epoch": 0.4634146341463415,
- "grad_norm": 3.6640360355377197,
- "learning_rate": 4.974105555736693e-06,
- "loss": 0.8419,
- "step": 95
- },
- {
- "epoch": 0.4682926829268293,
- "grad_norm": 2.480140209197998,
- "learning_rate": 4.973552658173186e-06,
- "loss": 0.6014,
- "step": 96
- },
- {
- "epoch": 0.47317073170731705,
- "grad_norm": 1.6618154048919678,
- "learning_rate": 4.972993951452887e-06,
- "loss": 0.5581,
- "step": 97
- },
- {
- "epoch": 0.47804878048780486,
- "grad_norm": 1.291348934173584,
- "learning_rate": 4.9724294368879214e-06,
- "loss": 0.4499,
- "step": 98
- },
- {
- "epoch": 0.48292682926829267,
- "grad_norm": 1.8929156064987183,
- "learning_rate": 4.971859115804055e-06,
- "loss": 0.7873,
- "step": 99
- },
- {
- "epoch": 0.4878048780487805,
- "grad_norm": 1.57858145236969,
- "learning_rate": 4.9712829895406935e-06,
- "loss": 0.5793,
- "step": 100
- },
- {
- "epoch": 0.4926829268292683,
- "grad_norm": 1.1852738857269287,
- "learning_rate": 4.970701059450872e-06,
- "loss": 0.3179,
- "step": 101
- },
- {
- "epoch": 0.4975609756097561,
- "grad_norm": 1.8145536184310913,
- "learning_rate": 4.970113326901258e-06,
- "loss": 0.7649,
- "step": 102
- },
- {
- "epoch": 0.5024390243902439,
- "grad_norm": 1.5796900987625122,
- "learning_rate": 4.9695197932721455e-06,
- "loss": 0.6736,
- "step": 103
- },
- {
- "epoch": 0.5073170731707317,
- "grad_norm": 1.4091283082962036,
- "learning_rate": 4.968920459957453e-06,
- "loss": 0.6019,
- "step": 104
- },
- {
- "epoch": 0.5121951219512195,
- "grad_norm": 2.3007757663726807,
- "learning_rate": 4.968315328364719e-06,
- "loss": 0.8106,
- "step": 105
- },
- {
- "epoch": 0.5170731707317073,
- "grad_norm": 2.226351499557495,
- "learning_rate": 4.9677043999151e-06,
- "loss": 0.9389,
- "step": 106
- },
- {
- "epoch": 0.5219512195121951,
- "grad_norm": 1.713365912437439,
- "learning_rate": 4.967087676043366e-06,
- "loss": 0.3773,
- "step": 107
- },
- {
- "epoch": 0.526829268292683,
- "grad_norm": 1.6297917366027832,
- "learning_rate": 4.966465158197897e-06,
- "loss": 0.6278,
- "step": 108
- },
- {
- "epoch": 0.5317073170731708,
- "grad_norm": 1.7754937410354614,
- "learning_rate": 4.965836847840681e-06,
- "loss": 0.5086,
- "step": 109
- },
- {
- "epoch": 0.5365853658536586,
- "grad_norm": 1.6794660091400146,
- "learning_rate": 4.96520274644731e-06,
- "loss": 0.6123,
- "step": 110
- },
- {
- "epoch": 0.5414634146341464,
- "grad_norm": 1.934241533279419,
- "learning_rate": 4.964562855506976e-06,
- "loss": 0.5779,
- "step": 111
- },
- {
- "epoch": 0.5463414634146342,
- "grad_norm": 1.2584961652755737,
- "learning_rate": 4.963917176522466e-06,
- "loss": 0.4115,
- "step": 112
- },
- {
- "epoch": 0.551219512195122,
- "grad_norm": 2.1711580753326416,
- "learning_rate": 4.963265711010164e-06,
- "loss": 0.7418,
- "step": 113
- },
- {
- "epoch": 0.5560975609756098,
- "grad_norm": 1.650855541229248,
- "learning_rate": 4.9626084605000395e-06,
- "loss": 0.5725,
- "step": 114
- },
- {
- "epoch": 0.5609756097560976,
- "grad_norm": 1.9013773202896118,
- "learning_rate": 4.961945426535652e-06,
- "loss": 0.4464,
- "step": 115
- },
- {
- "epoch": 0.5658536585365853,
- "grad_norm": 1.4872888326644897,
- "learning_rate": 4.961276610674141e-06,
- "loss": 0.6751,
- "step": 116
- },
- {
- "epoch": 0.5707317073170731,
- "grad_norm": 1.5318901538848877,
- "learning_rate": 4.960602014486225e-06,
- "loss": 0.7975,
- "step": 117
- },
- {
- "epoch": 0.5756097560975609,
- "grad_norm": 1.2589044570922852,
- "learning_rate": 4.959921639556199e-06,
- "loss": 0.4939,
- "step": 118
- },
- {
- "epoch": 0.5804878048780487,
- "grad_norm": 1.881285548210144,
- "learning_rate": 4.959235487481928e-06,
- "loss": 0.8222,
- "step": 119
- },
- {
- "epoch": 0.5853658536585366,
- "grad_norm": 1.576146125793457,
- "learning_rate": 4.958543559874846e-06,
- "loss": 0.4076,
- "step": 120
- },
- {
- "epoch": 0.5902439024390244,
- "grad_norm": 2.0078847408294678,
- "learning_rate": 4.9578458583599495e-06,
- "loss": 0.562,
- "step": 121
- },
- {
- "epoch": 0.5951219512195122,
- "grad_norm": 1.4955941438674927,
- "learning_rate": 4.957142384575795e-06,
- "loss": 0.5184,
- "step": 122
- },
- {
- "epoch": 0.6,
- "grad_norm": 1.9556761980056763,
- "learning_rate": 4.956433140174498e-06,
- "loss": 0.7036,
- "step": 123
- },
- {
- "epoch": 0.6048780487804878,
- "grad_norm": 1.9274554252624512,
- "learning_rate": 4.9557181268217225e-06,
- "loss": 0.7137,
- "step": 124
- },
- {
- "epoch": 0.6097560975609756,
- "grad_norm": 1.672255277633667,
- "learning_rate": 4.954997346196683e-06,
- "loss": 1.0138,
- "step": 125
- },
- {
- "epoch": 0.6146341463414634,
- "grad_norm": 1.6470623016357422,
- "learning_rate": 4.954270799992138e-06,
- "loss": 0.6059,
- "step": 126
- },
- {
- "epoch": 0.6195121951219512,
- "grad_norm": 2.0698301792144775,
- "learning_rate": 4.953538489914387e-06,
- "loss": 0.6224,
- "step": 127
- },
- {
- "epoch": 0.624390243902439,
- "grad_norm": 1.7772653102874756,
- "learning_rate": 4.9528004176832654e-06,
- "loss": 0.4795,
- "step": 128
- },
- {
- "epoch": 0.6292682926829268,
- "grad_norm": 2.276451587677002,
- "learning_rate": 4.952056585032142e-06,
- "loss": 0.8591,
- "step": 129
- },
- {
- "epoch": 0.6341463414634146,
- "grad_norm": 2.058436870574951,
- "learning_rate": 4.951306993707913e-06,
- "loss": 0.6678,
- "step": 130
- },
- {
- "epoch": 0.6390243902439025,
- "grad_norm": 1.8003332614898682,
- "learning_rate": 4.950551645470998e-06,
- "loss": 0.617,
- "step": 131
- },
- {
- "epoch": 0.6439024390243903,
- "grad_norm": 2.0872535705566406,
- "learning_rate": 4.9497905420953406e-06,
- "loss": 0.611,
- "step": 132
- },
- {
- "epoch": 0.6487804878048781,
- "grad_norm": 2.1815896034240723,
- "learning_rate": 4.949023685368395e-06,
- "loss": 0.6445,
- "step": 133
- },
- {
- "epoch": 0.6536585365853659,
- "grad_norm": 1.7983999252319336,
- "learning_rate": 4.948251077091131e-06,
- "loss": 0.8733,
- "step": 134
- },
- {
- "epoch": 0.6585365853658537,
- "grad_norm": 1.6845791339874268,
- "learning_rate": 4.947472719078025e-06,
- "loss": 0.6613,
- "step": 135
- },
- {
- "epoch": 0.6634146341463415,
- "grad_norm": 1.6869384050369263,
- "learning_rate": 4.9466886131570565e-06,
- "loss": 0.7319,
- "step": 136
- },
- {
- "epoch": 0.6682926829268293,
- "grad_norm": 1.9151450395584106,
- "learning_rate": 4.945898761169704e-06,
- "loss": 0.8795,
- "step": 137
- },
- {
- "epoch": 0.6731707317073171,
- "grad_norm": 1.3423423767089844,
- "learning_rate": 4.945103164970941e-06,
- "loss": 0.4385,
- "step": 138
- },
- {
- "epoch": 0.6780487804878049,
- "grad_norm": 1.4854035377502441,
- "learning_rate": 4.9443018264292304e-06,
- "loss": 0.5341,
- "step": 139
- },
- {
- "epoch": 0.6829268292682927,
- "grad_norm": 1.805953025817871,
- "learning_rate": 4.9434947474265225e-06,
- "loss": 0.6463,
- "step": 140
- },
- {
- "epoch": 0.6878048780487804,
- "grad_norm": 1.7254730463027954,
- "learning_rate": 4.942681929858249e-06,
- "loss": 0.8897,
- "step": 141
- },
- {
- "epoch": 0.6926829268292682,
- "grad_norm": 1.9024320840835571,
- "learning_rate": 4.941863375633315e-06,
- "loss": 0.7033,
- "step": 142
- },
- {
- "epoch": 0.697560975609756,
- "grad_norm": 1.4351361989974976,
- "learning_rate": 4.9410390866741056e-06,
- "loss": 0.527,
- "step": 143
- },
- {
- "epoch": 0.7024390243902439,
- "grad_norm": 1.4365131855010986,
- "learning_rate": 4.9402090649164655e-06,
- "loss": 0.5458,
- "step": 144
- },
- {
- "epoch": 0.7073170731707317,
- "grad_norm": 2.144742965698242,
- "learning_rate": 4.9393733123097085e-06,
- "loss": 0.8133,
- "step": 145
- },
- {
- "epoch": 0.7121951219512195,
- "grad_norm": 2.066554546356201,
- "learning_rate": 4.9385318308166065e-06,
- "loss": 0.7282,
- "step": 146
- },
- {
- "epoch": 0.7170731707317073,
- "grad_norm": 1.5883394479751587,
- "learning_rate": 4.937684622413385e-06,
- "loss": 0.4144,
- "step": 147
- },
- {
- "epoch": 0.7219512195121951,
- "grad_norm": 2.329960346221924,
- "learning_rate": 4.9368316890897185e-06,
- "loss": 0.8182,
- "step": 148
- },
- {
- "epoch": 0.7268292682926829,
- "grad_norm": 1.5622303485870361,
- "learning_rate": 4.9359730328487264e-06,
- "loss": 0.412,
- "step": 149
- },
- {
- "epoch": 0.7317073170731707,
- "grad_norm": 1.3020845651626587,
- "learning_rate": 4.935108655706972e-06,
- "loss": 0.5607,
- "step": 150
- },
- {
- "epoch": 0.7365853658536585,
- "grad_norm": 2.0023865699768066,
- "learning_rate": 4.934238559694448e-06,
- "loss": 0.9273,
- "step": 151
- },
- {
- "epoch": 0.7414634146341463,
- "grad_norm": 1.6495121717453003,
- "learning_rate": 4.9333627468545845e-06,
- "loss": 0.4775,
- "step": 152
- },
- {
- "epoch": 0.7463414634146341,
- "grad_norm": 1.4878952503204346,
- "learning_rate": 4.932481219244231e-06,
- "loss": 0.562,
- "step": 153
- },
- {
- "epoch": 0.751219512195122,
- "grad_norm": 2.352097511291504,
- "learning_rate": 4.931593978933666e-06,
- "loss": 0.6151,
- "step": 154
- },
- {
- "epoch": 0.7560975609756098,
- "grad_norm": 1.3260765075683594,
- "learning_rate": 4.930701028006577e-06,
- "loss": 0.6572,
- "step": 155
- },
- {
- "epoch": 0.7609756097560976,
- "grad_norm": 1.3987398147583008,
- "learning_rate": 4.929802368560066e-06,
- "loss": 0.5205,
- "step": 156
- },
- {
- "epoch": 0.7658536585365854,
- "grad_norm": 2.636143922805786,
- "learning_rate": 4.928898002704642e-06,
- "loss": 0.7562,
- "step": 157
- },
- {
- "epoch": 0.7707317073170732,
- "grad_norm": 2.312101125717163,
- "learning_rate": 4.927987932564215e-06,
- "loss": 0.6415,
- "step": 158
- },
- {
- "epoch": 0.775609756097561,
- "grad_norm": 1.6384342908859253,
- "learning_rate": 4.927072160276092e-06,
- "loss": 0.6148,
- "step": 159
- },
- {
- "epoch": 0.7804878048780488,
- "grad_norm": 1.3910821676254272,
- "learning_rate": 4.926150687990969e-06,
- "loss": 0.3866,
- "step": 160
- },
- {
- "epoch": 0.7853658536585366,
- "grad_norm": 1.7929809093475342,
- "learning_rate": 4.925223517872934e-06,
- "loss": 0.7282,
- "step": 161
- },
- {
- "epoch": 0.7902439024390244,
- "grad_norm": 2.1226377487182617,
- "learning_rate": 4.9242906520994484e-06,
- "loss": 0.8234,
- "step": 162
- },
- {
- "epoch": 0.7951219512195122,
- "grad_norm": 1.6692653894424438,
- "learning_rate": 4.923352092861358e-06,
- "loss": 0.7111,
- "step": 163
- },
- {
- "epoch": 0.8,
- "grad_norm": 1.7468419075012207,
- "learning_rate": 4.922407842362875e-06,
- "loss": 0.4963,
- "step": 164
- },
- {
- "epoch": 0.8048780487804879,
- "grad_norm": 1.5026869773864746,
- "learning_rate": 4.921457902821578e-06,
- "loss": 0.7182,
- "step": 165
- },
- {
- "epoch": 0.8097560975609757,
- "grad_norm": 1.565578818321228,
- "learning_rate": 4.920502276468408e-06,
- "loss": 0.6331,
- "step": 166
- },
- {
- "epoch": 0.8146341463414634,
- "grad_norm": 1.5285255908966064,
- "learning_rate": 4.9195409655476605e-06,
- "loss": 0.5216,
- "step": 167
- },
- {
- "epoch": 0.8195121951219512,
- "grad_norm": 2.1599016189575195,
- "learning_rate": 4.918573972316982e-06,
- "loss": 0.8197,
- "step": 168
- },
- {
- "epoch": 0.824390243902439,
- "grad_norm": 1.9766514301300049,
- "learning_rate": 4.917601299047361e-06,
- "loss": 0.6364,
- "step": 169
- },
- {
- "epoch": 0.8292682926829268,
- "grad_norm": 1.6226218938827515,
- "learning_rate": 4.916622948023129e-06,
- "loss": 0.5688,
- "step": 170
- },
- {
- "epoch": 0.8341463414634146,
- "grad_norm": 2.169351100921631,
- "learning_rate": 4.915638921541952e-06,
- "loss": 0.5144,
- "step": 171
- },
- {
- "epoch": 0.8390243902439024,
- "grad_norm": 2.0374093055725098,
- "learning_rate": 4.914649221914822e-06,
- "loss": 0.6684,
- "step": 172
- },
- {
- "epoch": 0.8439024390243902,
- "grad_norm": 1.8811930418014526,
- "learning_rate": 4.913653851466057e-06,
- "loss": 0.4537,
- "step": 173
- },
- {
- "epoch": 0.848780487804878,
- "grad_norm": 1.7003443241119385,
- "learning_rate": 4.912652812533291e-06,
- "loss": 0.5926,
- "step": 174
- },
- {
- "epoch": 0.8536585365853658,
- "grad_norm": 1.6899495124816895,
- "learning_rate": 4.911646107467472e-06,
- "loss": 0.6797,
- "step": 175
- },
- {
- "epoch": 0.8585365853658536,
- "grad_norm": 1.5597474575042725,
- "learning_rate": 4.9106337386328524e-06,
- "loss": 0.7363,
- "step": 176
- },
- {
- "epoch": 0.8634146341463415,
- "grad_norm": 1.7820264101028442,
- "learning_rate": 4.909615708406991e-06,
- "loss": 0.7277,
- "step": 177
- },
- {
- "epoch": 0.8682926829268293,
- "grad_norm": 1.5444871187210083,
- "learning_rate": 4.908592019180738e-06,
- "loss": 0.4991,
- "step": 178
- },
- {
- "epoch": 0.8731707317073171,
- "grad_norm": 1.1890966892242432,
- "learning_rate": 4.907562673358234e-06,
- "loss": 0.4345,
- "step": 179
- },
- {
- "epoch": 0.8780487804878049,
- "grad_norm": 1.6008920669555664,
- "learning_rate": 4.906527673356907e-06,
- "loss": 0.506,
- "step": 180
- },
- {
- "epoch": 0.8829268292682927,
- "grad_norm": 1.3028374910354614,
- "learning_rate": 4.905487021607462e-06,
- "loss": 0.4031,
- "step": 181
- },
- {
- "epoch": 0.8878048780487805,
- "grad_norm": 2.086660146713257,
- "learning_rate": 4.904440720553876e-06,
- "loss": 0.6167,
- "step": 182
- },
- {
- "epoch": 0.8926829268292683,
- "grad_norm": 1.5964947938919067,
- "learning_rate": 4.903388772653396e-06,
- "loss": 0.5923,
- "step": 183
- },
- {
- "epoch": 0.8975609756097561,
- "grad_norm": 1.7292804718017578,
- "learning_rate": 4.902331180376529e-06,
- "loss": 0.6047,
- "step": 184
- },
- {
- "epoch": 0.9024390243902439,
- "grad_norm": 1.6994556188583374,
- "learning_rate": 4.901267946207038e-06,
- "loss": 0.7615,
- "step": 185
- },
- {
- "epoch": 0.9073170731707317,
- "grad_norm": 1.9247877597808838,
- "learning_rate": 4.900199072641937e-06,
- "loss": 0.5834,
- "step": 186
- },
- {
- "epoch": 0.9121951219512195,
- "grad_norm": 1.4916514158248901,
- "learning_rate": 4.899124562191484e-06,
- "loss": 0.4737,
- "step": 187
- },
- {
- "epoch": 0.9170731707317074,
- "grad_norm": 1.9706366062164307,
- "learning_rate": 4.8980444173791735e-06,
- "loss": 0.4418,
- "step": 188
- },
- {
- "epoch": 0.9219512195121952,
- "grad_norm": 2.295691728591919,
- "learning_rate": 4.896958640741735e-06,
- "loss": 0.7035,
- "step": 189
- },
- {
- "epoch": 0.926829268292683,
- "grad_norm": 2.459785223007202,
- "learning_rate": 4.895867234829121e-06,
- "loss": 0.7988,
- "step": 190
- },
- {
- "epoch": 0.9317073170731708,
- "grad_norm": 1.6266491413116455,
- "learning_rate": 4.894770202204509e-06,
- "loss": 0.5665,
- "step": 191
- },
- {
- "epoch": 0.9365853658536586,
- "grad_norm": 1.549321174621582,
- "learning_rate": 4.893667545444285e-06,
- "loss": 0.5774,
- "step": 192
- },
- {
- "epoch": 0.9414634146341463,
- "grad_norm": 2.014610767364502,
- "learning_rate": 4.8925592671380495e-06,
- "loss": 0.5668,
- "step": 193
- },
- {
- "epoch": 0.9463414634146341,
- "grad_norm": 1.3015650510787964,
- "learning_rate": 4.891445369888601e-06,
- "loss": 0.427,
- "step": 194
- },
- {
- "epoch": 0.9512195121951219,
- "grad_norm": 1.6933586597442627,
- "learning_rate": 4.890325856311936e-06,
- "loss": 0.7354,
- "step": 195
- },
- {
- "epoch": 0.9560975609756097,
- "grad_norm": 2.1092705726623535,
- "learning_rate": 4.889200729037241e-06,
- "loss": 0.7039,
- "step": 196
- },
- {
- "epoch": 0.9609756097560975,
- "grad_norm": 1.625111699104309,
- "learning_rate": 4.888069990706884e-06,
- "loss": 0.5369,
- "step": 197
- },
- {
- "epoch": 0.9658536585365853,
- "grad_norm": 1.4547514915466309,
- "learning_rate": 4.886933643976414e-06,
- "loss": 0.3645,
- "step": 198
- },
- {
- "epoch": 0.9707317073170731,
- "grad_norm": 1.832529902458191,
- "learning_rate": 4.885791691514548e-06,
- "loss": 0.422,
- "step": 199
- },
- {
- "epoch": 0.975609756097561,
- "grad_norm": 1.533402442932129,
- "learning_rate": 4.884644136003172e-06,
- "loss": 0.4698,
- "step": 200
- },
- {
- "epoch": 0.9804878048780488,
- "grad_norm": 1.9563912153244019,
- "learning_rate": 4.883490980137327e-06,
- "loss": 1.1273,
- "step": 201
- },
- {
- "epoch": 0.9853658536585366,
- "grad_norm": 1.7044615745544434,
- "learning_rate": 4.882332226625208e-06,
- "loss": 0.6056,
- "step": 202
- },
- {
- "epoch": 0.9902439024390244,
- "grad_norm": 1.6405285596847534,
- "learning_rate": 4.881167878188158e-06,
- "loss": 0.7204,
- "step": 203
- },
- {
- "epoch": 0.9951219512195122,
- "grad_norm": 1.5838991403579712,
- "learning_rate": 4.8799979375606565e-06,
- "loss": 0.553,
- "step": 204
- },
- {
- "epoch": 1.0,
- "grad_norm": 1.6604121923446655,
- "learning_rate": 4.878822407490319e-06,
- "loss": 0.4694,
- "step": 205
- },
- {
- "epoch": 1.0048780487804878,
- "grad_norm": 1.4410219192504883,
- "learning_rate": 4.8776412907378845e-06,
- "loss": 0.3954,
- "step": 206
- },
- {
- "epoch": 1.0097560975609756,
- "grad_norm": 1.3399821519851685,
- "learning_rate": 4.876454590077216e-06,
- "loss": 0.4453,
- "step": 207
- },
- {
- "epoch": 1.0146341463414634,
- "grad_norm": 1.3594847917556763,
- "learning_rate": 4.875262308295289e-06,
- "loss": 0.5706,
- "step": 208
- },
- {
- "epoch": 1.0195121951219512,
- "grad_norm": 1.4475713968276978,
- "learning_rate": 4.874064448192185e-06,
- "loss": 0.4708,
- "step": 209
- },
- {
- "epoch": 1.024390243902439,
- "grad_norm": 2.237009286880493,
- "learning_rate": 4.872861012581088e-06,
- "loss": 0.4116,
- "step": 210
- },
- {
- "epoch": 1.0292682926829269,
- "grad_norm": 1.9443275928497314,
- "learning_rate": 4.871652004288275e-06,
- "loss": 0.5268,
- "step": 211
- },
- {
- "epoch": 1.0341463414634147,
- "grad_norm": 2.032132387161255,
- "learning_rate": 4.870437426153113e-06,
- "loss": 0.5144,
- "step": 212
- },
- {
- "epoch": 1.0390243902439025,
- "grad_norm": 2.908564329147339,
- "learning_rate": 4.869217281028045e-06,
- "loss": 0.6369,
- "step": 213
- },
- {
- "epoch": 1.0439024390243903,
- "grad_norm": 1.9555280208587646,
- "learning_rate": 4.867991571778592e-06,
- "loss": 0.6615,
- "step": 214
- },
- {
- "epoch": 1.048780487804878,
- "grad_norm": 1.4361177682876587,
- "learning_rate": 4.866760301283342e-06,
- "loss": 0.3721,
- "step": 215
- },
- {
- "epoch": 1.053658536585366,
- "grad_norm": 1.5162372589111328,
- "learning_rate": 4.865523472433942e-06,
- "loss": 0.5117,
- "step": 216
- },
- {
- "epoch": 1.0585365853658537,
- "grad_norm": 1.4366101026535034,
- "learning_rate": 4.8642810881350935e-06,
- "loss": 0.4239,
- "step": 217
- },
- {
- "epoch": 1.0634146341463415,
- "grad_norm": 1.6032313108444214,
- "learning_rate": 4.863033151304546e-06,
- "loss": 0.4752,
- "step": 218
- },
- {
- "epoch": 1.0682926829268293,
- "grad_norm": 9.823326110839844,
- "learning_rate": 4.861779664873088e-06,
- "loss": 0.7429,
- "step": 219
- },
- {
- "epoch": 1.0731707317073171,
- "grad_norm": 2.1821725368499756,
- "learning_rate": 4.8605206317845425e-06,
- "loss": 0.5409,
- "step": 220
- },
- {
- "epoch": 1.078048780487805,
- "grad_norm": 1.8968735933303833,
- "learning_rate": 4.859256054995758e-06,
- "loss": 0.6201,
- "step": 221
- },
- {
- "epoch": 1.0829268292682928,
- "grad_norm": 1.3566253185272217,
- "learning_rate": 4.8579859374766e-06,
- "loss": 0.3323,
- "step": 222
- },
- {
- "epoch": 1.0878048780487806,
- "grad_norm": 1.2179781198501587,
- "learning_rate": 4.856710282209952e-06,
- "loss": 0.2767,
- "step": 223
- },
- {
- "epoch": 1.0926829268292684,
- "grad_norm": 1.5012304782867432,
- "learning_rate": 4.855429092191698e-06,
- "loss": 0.5419,
- "step": 224
- },
- {
- "epoch": 1.0975609756097562,
- "grad_norm": 1.361253261566162,
- "learning_rate": 4.854142370430725e-06,
- "loss": 0.4209,
- "step": 225
- },
- {
- "epoch": 1.102439024390244,
- "grad_norm": 1.694344162940979,
- "learning_rate": 4.8528501199489045e-06,
- "loss": 0.5328,
- "step": 226
- },
- {
- "epoch": 1.1073170731707318,
- "grad_norm": 1.9685852527618408,
- "learning_rate": 4.851552343781099e-06,
- "loss": 0.66,
- "step": 227
- },
- {
- "epoch": 1.1121951219512196,
- "grad_norm": 1.3181688785552979,
- "learning_rate": 4.850249044975145e-06,
- "loss": 0.533,
- "step": 228
- },
- {
- "epoch": 1.1170731707317074,
- "grad_norm": 1.7146031856536865,
- "learning_rate": 4.848940226591849e-06,
- "loss": 0.8205,
- "step": 229
- },
- {
- "epoch": 1.1219512195121952,
- "grad_norm": 1.6150505542755127,
- "learning_rate": 4.847625891704982e-06,
- "loss": 0.4771,
- "step": 230
- },
- {
- "epoch": 1.126829268292683,
- "grad_norm": 1.6849442720413208,
- "learning_rate": 4.846306043401268e-06,
- "loss": 0.4493,
- "step": 231
- },
- {
- "epoch": 1.1317073170731708,
- "grad_norm": 1.8745028972625732,
- "learning_rate": 4.844980684780381e-06,
- "loss": 0.4396,
- "step": 232
- },
- {
- "epoch": 1.1365853658536587,
- "grad_norm": 1.984281301498413,
- "learning_rate": 4.8436498189549345e-06,
- "loss": 0.4695,
- "step": 233
- },
- {
- "epoch": 1.1414634146341462,
- "grad_norm": 1.1260443925857544,
- "learning_rate": 4.842313449050477e-06,
- "loss": 0.3695,
- "step": 234
- },
- {
- "epoch": 1.146341463414634,
1652
- "grad_norm": 1.3031764030456543,
1653
- "learning_rate": 4.840971578205486e-06,
1654
- "loss": 0.341,
1655
- "step": 235
1656
- },
1657
- {
1658
- "epoch": 1.1512195121951219,
1659
- "grad_norm": 1.3267005681991577,
1660
- "learning_rate": 4.839624209571352e-06,
1661
- "loss": 0.2757,
1662
- "step": 236
1663
- },
1664
- {
1665
- "epoch": 1.1560975609756097,
1666
- "grad_norm": 1.595441460609436,
1667
- "learning_rate": 4.838271346312381e-06,
1668
- "loss": 0.6283,
1669
- "step": 237
1670
- },
1671
- {
1672
- "epoch": 1.1609756097560975,
1673
- "grad_norm": 1.6265649795532227,
1674
- "learning_rate": 4.836912991605782e-06,
1675
- "loss": 0.6538,
1676
- "step": 238
1677
- },
1678
- {
1679
- "epoch": 1.1658536585365853,
1680
- "grad_norm": 1.6886593103408813,
1681
- "learning_rate": 4.835549148641663e-06,
1682
- "loss": 0.4063,
1683
- "step": 239
1684
- },
1685
- {
1686
- "epoch": 1.170731707317073,
1687
- "grad_norm": 1.7404961585998535,
1688
- "learning_rate": 4.834179820623018e-06,
1689
- "loss": 0.5187,
1690
- "step": 240
1691
- },
1692
- {
1693
- "epoch": 1.175609756097561,
1694
- "grad_norm": 1.246699571609497,
1695
- "learning_rate": 4.832805010765724e-06,
1696
- "loss": 0.4216,
1697
- "step": 241
1698
- },
1699
- {
1700
- "epoch": 1.1804878048780487,
1701
- "grad_norm": 1.2928248643875122,
1702
- "learning_rate": 4.831424722298531e-06,
1703
- "loss": 0.4373,
1704
- "step": 242
1705
- },
1706
- {
1707
- "epoch": 1.1853658536585365,
1708
- "grad_norm": 1.948778510093689,
1709
- "learning_rate": 4.830038958463061e-06,
1710
- "loss": 0.496,
1711
- "step": 243
1712
- },
1713
- {
1714
- "epoch": 1.1902439024390243,
1715
- "grad_norm": 1.8547722101211548,
1716
- "learning_rate": 4.828647722513785e-06,
1717
- "loss": 0.6239,
1718
- "step": 244
1719
- },
1720
- {
1721
- "epoch": 1.1951219512195121,
1722
- "grad_norm": 2.0558724403381348,
1723
- "learning_rate": 4.827251017718034e-06,
1724
- "loss": 0.6466,
1725
- "step": 245
1726
- },
1727
- {
1728
- "epoch": 1.2,
1729
- "grad_norm": 1.8790161609649658,
1730
- "learning_rate": 4.8258488473559794e-06,
1731
- "loss": 0.6918,
1732
- "step": 246
1733
- },
1734
- {
1735
- "epoch": 1.2048780487804878,
1736
- "grad_norm": 1.9804240465164185,
1737
- "learning_rate": 4.824441214720629e-06,
1738
- "loss": 0.6641,
1739
- "step": 247
1740
- },
1741
- {
1742
- "epoch": 1.2097560975609756,
1743
- "grad_norm": 1.229308009147644,
1744
- "learning_rate": 4.823028123117818e-06,
1745
- "loss": 0.267,
1746
- "step": 248
1747
- },
1748
- {
1749
- "epoch": 1.2146341463414634,
1750
- "grad_norm": 1.3673735857009888,
1751
- "learning_rate": 4.8216095758662015e-06,
1752
- "loss": 0.543,
1753
- "step": 249
1754
- },
1755
- {
1756
- "epoch": 1.2195121951219512,
1757
- "grad_norm": 1.4206926822662354,
1758
- "learning_rate": 4.82018557629725e-06,
1759
- "loss": 0.6299,
1760
- "step": 250
1761
- },
1762
- {
1763
- "epoch": 1.224390243902439,
1764
- "grad_norm": 1.400539517402649,
1765
- "learning_rate": 4.8187561277552376e-06,
1766
- "loss": 0.4163,
1767
- "step": 251
1768
- },
1769
- {
1770
- "epoch": 1.2292682926829268,
1771
- "grad_norm": 1.3771297931671143,
1772
- "learning_rate": 4.817321233597232e-06,
1773
- "loss": 0.6105,
1774
- "step": 252
1775
- },
1776
- {
1777
- "epoch": 1.2341463414634146,
1778
- "grad_norm": 1.6716666221618652,
1779
- "learning_rate": 4.815880897193095e-06,
1780
- "loss": 0.3741,
1781
- "step": 253
1782
- },
1783
- {
1784
- "epoch": 1.2390243902439024,
1785
- "grad_norm": 1.9220954179763794,
1786
- "learning_rate": 4.814435121925466e-06,
1787
- "loss": 0.6125,
1788
- "step": 254
1789
- },
1790
- {
1791
- "epoch": 1.2439024390243902,
1792
- "grad_norm": 1.3171087503433228,
1793
- "learning_rate": 4.812983911189761e-06,
1794
- "loss": 0.4345,
1795
- "step": 255
1796
- },
1797
- {
1798
- "epoch": 1.248780487804878,
1799
- "grad_norm": 1.500893473625183,
1800
- "learning_rate": 4.811527268394157e-06,
1801
- "loss": 0.3848,
1802
- "step": 256
1803
- },
1804
- {
1805
- "epoch": 1.2536585365853659,
1806
- "grad_norm": 1.8701486587524414,
1807
- "learning_rate": 4.810065196959591e-06,
1808
- "loss": 0.4871,
1809
- "step": 257
1810
- },
1811
- {
1812
- "epoch": 1.2585365853658537,
1813
- "grad_norm": 1.7081878185272217,
1814
- "learning_rate": 4.8085977003197496e-06,
1815
- "loss": 0.6514,
1816
- "step": 258
1817
- },
1818
- {
1819
- "epoch": 1.2634146341463415,
1820
- "grad_norm": 2.583132266998291,
1821
- "learning_rate": 4.807124781921059e-06,
1822
- "loss": 0.8608,
1823
- "step": 259
1824
- },
1825
- {
1826
- "epoch": 1.2682926829268293,
1827
- "grad_norm": 1.467129111289978,
1828
- "learning_rate": 4.805646445222679e-06,
1829
- "loss": 0.3865,
1830
- "step": 260
1831
- },
1832
- {
1833
- "epoch": 1.273170731707317,
1834
- "grad_norm": 1.349326729774475,
1835
- "learning_rate": 4.804162693696494e-06,
1836
- "loss": 0.376,
1837
- "step": 261
1838
- },
1839
- {
1840
- "epoch": 1.278048780487805,
1841
- "grad_norm": 1.9350008964538574,
1842
- "learning_rate": 4.802673530827105e-06,
1843
- "loss": 0.4269,
1844
- "step": 262
1845
- },
1846
- {
1847
- "epoch": 1.2829268292682927,
1848
- "grad_norm": 1.1973533630371094,
1849
- "learning_rate": 4.801178960111823e-06,
1850
- "loss": 0.4611,
1851
- "step": 263
1852
- },
1853
- {
1854
- "epoch": 1.2878048780487805,
1855
- "grad_norm": 1.703409194946289,
1856
- "learning_rate": 4.799678985060658e-06,
1857
- "loss": 0.664,
1858
- "step": 264
1859
- },
1860
- {
1861
- "epoch": 1.2926829268292683,
1862
- "grad_norm": 1.3793981075286865,
1863
- "learning_rate": 4.798173609196314e-06,
1864
- "loss": 0.6515,
1865
- "step": 265
1866
- },
1867
- {
1868
- "epoch": 1.2975609756097561,
1869
- "grad_norm": 1.6443597078323364,
1870
- "learning_rate": 4.796662836054176e-06,
1871
- "loss": 0.3316,
1872
- "step": 266
1873
- },
1874
- {
1875
- "epoch": 1.302439024390244,
1876
- "grad_norm": 1.2548167705535889,
1877
- "learning_rate": 4.795146669182304e-06,
1878
- "loss": 0.4344,
1879
- "step": 267
1880
- },
1881
- {
1882
- "epoch": 1.3073170731707318,
1883
- "grad_norm": 1.4349764585494995,
1884
- "learning_rate": 4.793625112141431e-06,
1885
- "loss": 0.3804,
1886
- "step": 268
1887
- },
1888
- {
1889
- "epoch": 1.3121951219512196,
1890
- "grad_norm": 1.5517576932907104,
1891
- "learning_rate": 4.792098168504943e-06,
1892
- "loss": 0.4546,
1893
- "step": 269
1894
- },
1895
- {
1896
- "epoch": 1.3170731707317074,
1897
- "grad_norm": 1.3105218410491943,
1898
- "learning_rate": 4.790565841858879e-06,
1899
- "loss": 0.3096,
1900
- "step": 270
1901
- },
1902
- {
1903
- "epoch": 1.3219512195121952,
1904
- "grad_norm": 1.6386523246765137,
1905
- "learning_rate": 4.789028135801919e-06,
1906
- "loss": 0.6408,
1907
- "step": 271
1908
- },
1909
- {
1910
- "epoch": 1.326829268292683,
1911
- "grad_norm": 2.0389583110809326,
1912
- "learning_rate": 4.787485053945377e-06,
1913
- "loss": 0.7108,
1914
- "step": 272
1915
- },
1916
- {
1917
- "epoch": 1.3317073170731708,
1918
- "grad_norm": 1.4428800344467163,
1919
- "learning_rate": 4.785936599913193e-06,
1920
- "loss": 0.5489,
1921
- "step": 273
1922
- },
1923
- {
1924
- "epoch": 1.3365853658536586,
1925
- "grad_norm": 1.7775859832763672,
1926
- "learning_rate": 4.784382777341922e-06,
1927
- "loss": 0.5516,
1928
- "step": 274
1929
- },
1930
- {
1931
- "epoch": 1.3414634146341464,
1932
- "grad_norm": 2.048654794692993,
1933
- "learning_rate": 4.782823589880729e-06,
1934
- "loss": 0.8822,
1935
- "step": 275
1936
- },
1937
- {
1938
- "epoch": 1.346341463414634,
1939
- "grad_norm": 1.6368885040283203,
1940
- "learning_rate": 4.7812590411913755e-06,
1941
- "loss": 0.6008,
1942
- "step": 276
1943
- },
1944
- {
1945
- "epoch": 1.3512195121951218,
1946
- "grad_norm": 1.6349594593048096,
1947
- "learning_rate": 4.779689134948217e-06,
1948
- "loss": 0.8552,
1949
- "step": 277
1950
- },
1951
- {
1952
- "epoch": 1.3560975609756096,
1953
- "grad_norm": 2.0282487869262695,
1954
- "learning_rate": 4.77811387483819e-06,
1955
- "loss": 0.399,
1956
- "step": 278
1957
- },
1958
- {
1959
- "epoch": 1.3609756097560974,
1960
- "grad_norm": 1.902794599533081,
1961
- "learning_rate": 4.776533264560804e-06,
1962
- "loss": 0.583,
1963
- "step": 279
1964
- },
1965
- {
1966
- "epoch": 1.3658536585365852,
1967
- "grad_norm": 1.82004714012146,
1968
- "learning_rate": 4.774947307828134e-06,
1969
- "loss": 0.7195,
1970
- "step": 280
1971
- },
1972
- {
1973
- "epoch": 1.370731707317073,
1974
- "grad_norm": 1.4813153743743896,
1975
- "learning_rate": 4.773356008364812e-06,
1976
- "loss": 0.4371,
1977
- "step": 281
1978
- },
1979
- {
1980
- "epoch": 1.3756097560975609,
1981
- "grad_norm": 1.408211588859558,
1982
- "learning_rate": 4.771759369908017e-06,
1983
- "loss": 0.3133,
1984
- "step": 282
1985
- },
1986
- {
1987
- "epoch": 1.3804878048780487,
1988
- "grad_norm": 1.4923700094223022,
1989
- "learning_rate": 4.7701573962074635e-06,
1990
- "loss": 0.4993,
1991
- "step": 283
1992
- },
1993
- {
1994
- "epoch": 1.3853658536585365,
1995
- "grad_norm": 1.2863298654556274,
1996
- "learning_rate": 4.7685500910254015e-06,
1997
- "loss": 0.3568,
1998
- "step": 284
1999
- },
2000
- {
2001
- "epoch": 1.3902439024390243,
2002
- "grad_norm": 1.2870134115219116,
2003
- "learning_rate": 4.766937458136598e-06,
2004
- "loss": 0.5413,
2005
- "step": 285
2006
- },
2007
- {
2008
- "epoch": 1.395121951219512,
2009
- "grad_norm": 1.6780668497085571,
2010
- "learning_rate": 4.765319501328332e-06,
2011
- "loss": 0.6028,
2012
- "step": 286
2013
- },
2014
- {
2015
- "epoch": 1.4,
2016
- "grad_norm": 1.4770684242248535,
2017
- "learning_rate": 4.763696224400391e-06,
2018
- "loss": 0.3658,
2019
- "step": 287
2020
- },
2021
- {
2022
- "epoch": 1.4048780487804877,
2023
- "grad_norm": 1.2198718786239624,
2024
- "learning_rate": 4.762067631165049e-06,
2025
- "loss": 0.3817,
2026
- "step": 288
2027
- },
2028
- {
2029
- "epoch": 1.4097560975609755,
2030
- "grad_norm": 1.5318330526351929,
2031
- "learning_rate": 4.760433725447071e-06,
2032
- "loss": 0.5163,
2033
- "step": 289
2034
- },
2035
- {
2036
- "epoch": 1.4146341463414633,
2037
- "grad_norm": 1.849548578262329,
2038
- "learning_rate": 4.758794511083697e-06,
2039
- "loss": 0.6594,
2040
- "step": 290
2041
- },
2042
- {
2043
- "epoch": 1.4195121951219511,
2044
- "grad_norm": 1.7906076908111572,
2045
- "learning_rate": 4.757149991924633e-06,
2046
- "loss": 0.5289,
2047
- "step": 291
2048
- },
2049
- {
2050
- "epoch": 1.424390243902439,
2051
- "grad_norm": 1.328231930732727,
2052
- "learning_rate": 4.755500171832045e-06,
2053
- "loss": 0.3258,
2054
- "step": 292
2055
- },
2056
- {
2057
- "epoch": 1.4292682926829268,
2058
- "grad_norm": 1.619766354560852,
2059
- "learning_rate": 4.753845054680548e-06,
2060
- "loss": 0.4876,
2061
- "step": 293
2062
- },
2063
- {
2064
- "epoch": 1.4341463414634146,
2065
- "grad_norm": 1.6691818237304688,
2066
- "learning_rate": 4.752184644357197e-06,
2067
- "loss": 0.3899,
2068
- "step": 294
2069
- },
2070
- {
2071
- "epoch": 1.4390243902439024,
2072
- "grad_norm": 1.4808595180511475,
2073
- "learning_rate": 4.750518944761477e-06,
2074
- "loss": 0.4277,
2075
- "step": 295
2076
- },
2077
- {
2078
- "epoch": 1.4439024390243902,
2079
- "grad_norm": 1.71161687374115,
2080
- "learning_rate": 4.748847959805297e-06,
2081
- "loss": 0.4172,
2082
- "step": 296
2083
- },
2084
- {
2085
- "epoch": 1.448780487804878,
2086
- "grad_norm": 1.4367694854736328,
2087
- "learning_rate": 4.7471716934129774e-06,
2088
- "loss": 0.4121,
2089
- "step": 297
2090
- },
2091
- {
2092
- "epoch": 1.4536585365853658,
2093
- "grad_norm": 1.582480549812317,
2094
- "learning_rate": 4.745490149521242e-06,
2095
- "loss": 0.392,
2096
- "step": 298
2097
- },
2098
- {
2099
- "epoch": 1.4585365853658536,
2100
- "grad_norm": 1.9606919288635254,
2101
- "learning_rate": 4.743803332079209e-06,
2102
- "loss": 0.4478,
2103
- "step": 299
2104
- },
2105
- {
2106
- "epoch": 1.4634146341463414,
2107
- "grad_norm": 1.3635920286178589,
2108
- "learning_rate": 4.742111245048382e-06,
2109
- "loss": 0.3971,
2110
- "step": 300
2111
- },
2112
- {
2113
- "epoch": 1.4682926829268292,
2114
- "grad_norm": 1.2133499383926392,
2115
- "learning_rate": 4.740413892402639e-06,
2116
- "loss": 0.3502,
2117
- "step": 301
2118
- },
2119
- {
2120
- "epoch": 1.473170731707317,
2121
- "grad_norm": 1.5149365663528442,
2122
- "learning_rate": 4.738711278128228e-06,
2123
- "loss": 0.4716,
2124
- "step": 302
2125
- },
2126
- {
2127
- "epoch": 1.4780487804878049,
2128
- "grad_norm": 1.8573330640792847,
2129
- "learning_rate": 4.7370034062237476e-06,
2130
- "loss": 0.3264,
2131
- "step": 303
2132
- },
2133
- {
2134
- "epoch": 1.4829268292682927,
2135
- "grad_norm": 1.2374138832092285,
2136
- "learning_rate": 4.73529028070015e-06,
2137
- "loss": 0.4316,
2138
- "step": 304
2139
- },
2140
- {
2141
- "epoch": 1.4878048780487805,
2142
- "grad_norm": 1.797386646270752,
2143
- "learning_rate": 4.733571905580723e-06,
2144
- "loss": 0.7425,
2145
- "step": 305
2146
- },
2147
- {
2148
- "epoch": 1.4926829268292683,
2149
- "grad_norm": 1.6922880411148071,
2150
- "learning_rate": 4.731848284901082e-06,
2151
- "loss": 0.6001,
2152
- "step": 306
2153
- },
2154
- {
2155
- "epoch": 1.497560975609756,
2156
- "grad_norm": 1.5501389503479004,
2157
- "learning_rate": 4.730119422709165e-06,
2158
- "loss": 0.3536,
2159
- "step": 307
2160
- },
2161
- {
2162
- "epoch": 1.502439024390244,
2163
- "grad_norm": 1.9408127069473267,
2164
- "learning_rate": 4.728385323065215e-06,
2165
- "loss": 0.5422,
2166
- "step": 308
2167
- },
2168
- {
2169
- "epoch": 1.5073170731707317,
2170
- "grad_norm": 1.4196149110794067,
2171
- "learning_rate": 4.7266459900417815e-06,
2172
- "loss": 0.3504,
2173
- "step": 309
2174
- },
2175
- {
2176
- "epoch": 1.5121951219512195,
2177
- "grad_norm": 1.5519887208938599,
2178
- "learning_rate": 4.724901427723698e-06,
2179
- "loss": 0.6354,
2180
- "step": 310
2181
- },
2182
- {
2183
- "epoch": 1.5170731707317073,
2184
- "grad_norm": 1.867122769355774,
2185
- "learning_rate": 4.723151640208084e-06,
2186
- "loss": 0.3166,
2187
- "step": 311
2188
- },
2189
- {
2190
- "epoch": 1.5219512195121951,
2191
- "grad_norm": 1.6368682384490967,
2192
- "learning_rate": 4.721396631604327e-06,
2193
- "loss": 0.3401,
2194
- "step": 312
2195
- },
2196
- {
2197
- "epoch": 1.526829268292683,
2198
- "grad_norm": 1.2231279611587524,
2199
- "learning_rate": 4.7196364060340785e-06,
2200
- "loss": 0.3504,
2201
- "step": 313
2202
- },
2203
- {
2204
- "epoch": 1.5317073170731708,
2205
- "grad_norm": 1.622697353363037,
2206
- "learning_rate": 4.7178709676312416e-06,
2207
- "loss": 0.6338,
2208
- "step": 314
2209
- },
2210
- {
2211
- "epoch": 1.5365853658536586,
2212
- "grad_norm": 1.7756884098052979,
2213
- "learning_rate": 4.716100320541961e-06,
2214
- "loss": 0.8673,
2215
- "step": 315
2216
- },
2217
- {
2218
- "epoch": 1.5414634146341464,
2219
- "grad_norm": 1.6664738655090332,
2220
- "learning_rate": 4.714324468924614e-06,
2221
- "loss": 0.5582,
2222
- "step": 316
2223
- },
2224
- {
2225
- "epoch": 1.5463414634146342,
2226
- "grad_norm": 1.8026777505874634,
2227
- "learning_rate": 4.712543416949803e-06,
2228
- "loss": 0.6161,
2229
- "step": 317
2230
- },
2231
- {
2232
- "epoch": 1.551219512195122,
2233
- "grad_norm": 2.3471832275390625,
2234
- "learning_rate": 4.71075716880034e-06,
2235
- "loss": 0.5923,
2236
- "step": 318
2237
- },
2238
- {
2239
- "epoch": 1.5560975609756098,
2240
- "grad_norm": 1.9087973833084106,
2241
- "learning_rate": 4.708965728671243e-06,
2242
- "loss": 0.7339,
2243
- "step": 319
2244
- },
2245
- {
2246
- "epoch": 1.5609756097560976,
2247
- "grad_norm": 1.8473851680755615,
2248
- "learning_rate": 4.7071691007697214e-06,
2249
- "loss": 0.5554,
2250
- "step": 320
2251
- },
2252
- {
2253
- "epoch": 1.5658536585365854,
2254
- "grad_norm": 1.3977913856506348,
2255
- "learning_rate": 4.705367289315172e-06,
2256
- "loss": 0.5699,
2257
- "step": 321
2258
- },
2259
- {
2260
- "epoch": 1.5707317073170732,
2261
- "grad_norm": 1.6106524467468262,
2262
- "learning_rate": 4.703560298539158e-06,
2263
- "loss": 0.3525,
2264
- "step": 322
2265
- },
2266
- {
2267
- "epoch": 1.575609756097561,
2268
- "grad_norm": 1.5842078924179077,
2269
- "learning_rate": 4.701748132685415e-06,
2270
- "loss": 0.3749,
2271
- "step": 323
2272
- },
2273
- {
2274
- "epoch": 1.5804878048780489,
2275
- "grad_norm": 1.3624794483184814,
2276
- "learning_rate": 4.699930796009825e-06,
2277
- "loss": 0.4388,
2278
- "step": 324
2279
- },
2280
- {
2281
- "epoch": 1.5853658536585367,
2282
- "grad_norm": 1.9962315559387207,
2283
- "learning_rate": 4.698108292780418e-06,
2284
- "loss": 0.6069,
2285
- "step": 325
2286
- },
2287
- {
2288
- "epoch": 1.5902439024390245,
2289
- "grad_norm": 1.354267954826355,
2290
- "learning_rate": 4.696280627277356e-06,
2291
- "loss": 0.4206,
2292
- "step": 326
2293
- },
2294
- {
2295
- "epoch": 1.5951219512195123,
2296
- "grad_norm": 1.3546578884124756,
2297
- "learning_rate": 4.6944478037929255e-06,
2298
- "loss": 0.416,
2299
- "step": 327
2300
- },
2301
- {
2302
- "epoch": 1.6,
2303
- "grad_norm": 1.5159077644348145,
2304
- "learning_rate": 4.692609826631525e-06,
2305
- "loss": 0.4456,
2306
- "step": 328
2307
- },
2308
- {
2309
- "epoch": 1.604878048780488,
2310
- "grad_norm": 1.7104778289794922,
2311
- "learning_rate": 4.690766700109659e-06,
2312
- "loss": 0.295,
2313
- "step": 329
2314
- },
2315
- {
2316
- "epoch": 1.6097560975609757,
2317
- "grad_norm": 1.1690094470977783,
2318
- "learning_rate": 4.6889184285559234e-06,
2319
- "loss": 0.2971,
2320
- "step": 330
2321
- },
2322
- {
2323
- "epoch": 1.6146341463414635,
2324
- "grad_norm": 2.9057915210723877,
2325
- "learning_rate": 4.687065016310996e-06,
2326
- "loss": 0.6427,
2327
- "step": 331
2328
- },
2329
- {
2330
- "epoch": 1.6195121951219513,
2331
- "grad_norm": 1.862723469734192,
2332
- "learning_rate": 4.685206467727631e-06,
2333
- "loss": 0.5023,
2334
- "step": 332
2335
- },
2336
- {
2337
- "epoch": 1.6243902439024391,
2338
- "grad_norm": 1.4070931673049927,
2339
- "learning_rate": 4.683342787170644e-06,
2340
- "loss": 0.4218,
2341
- "step": 333
2342
- },
2343
- {
2344
- "epoch": 1.629268292682927,
2345
- "grad_norm": 1.7011152505874634,
2346
- "learning_rate": 4.6814739790169006e-06,
2347
- "loss": 0.4429,
2348
- "step": 334
2349
- },
2350
- {
2351
- "epoch": 1.6341463414634148,
2352
- "grad_norm": 1.8267077207565308,
2353
- "learning_rate": 4.679600047655313e-06,
2354
- "loss": 0.6062,
2355
- "step": 335
2356
- },
2357
- {
2358
- "epoch": 1.6390243902439026,
2359
- "grad_norm": 2.120957612991333,
2360
- "learning_rate": 4.6777209974868194e-06,
2361
- "loss": 0.9207,
2362
- "step": 336
2363
- },
2364
- {
2365
- "epoch": 1.6439024390243904,
2366
- "grad_norm": 1.6445318460464478,
2367
- "learning_rate": 4.675836832924387e-06,
2368
- "loss": 0.4324,
2369
- "step": 337
2370
- },
2371
- {
2372
- "epoch": 1.6487804878048782,
2373
- "grad_norm": 1.2712141275405884,
2374
- "learning_rate": 4.673947558392989e-06,
2375
- "loss": 0.2775,
2376
- "step": 338
2377
- },
2378
- {
2379
- "epoch": 1.653658536585366,
2380
- "grad_norm": 1.1474132537841797,
2381
- "learning_rate": 4.6720531783296e-06,
2382
- "loss": 0.3437,
2383
- "step": 339
2384
- },
2385
- {
2386
- "epoch": 1.6585365853658538,
2387
- "grad_norm": 1.6361477375030518,
2388
- "learning_rate": 4.670153697183185e-06,
2389
- "loss": 0.4716,
2390
- "step": 340
2391
- },
2392
- {
2393
- "epoch": 1.6634146341463416,
2394
- "grad_norm": 1.6338160037994385,
2395
- "learning_rate": 4.668249119414692e-06,
2396
- "loss": 0.4693,
2397
- "step": 341
2398
- },
2399
- {
2400
- "epoch": 1.6682926829268294,
2401
- "grad_norm": 1.531384825706482,
2402
- "learning_rate": 4.666339449497033e-06,
2403
- "loss": 0.5064,
2404
- "step": 342
2405
- },
2406
- {
2407
- "epoch": 1.6731707317073172,
2408
- "grad_norm": 1.8817652463912964,
2409
- "learning_rate": 4.664424691915084e-06,
2410
- "loss": 0.4733,
2411
- "step": 343
2412
- },
2413
- {
2414
- "epoch": 1.678048780487805,
2415
- "grad_norm": 2.0723443031311035,
2416
- "learning_rate": 4.6625048511656675e-06,
2417
- "loss": 0.48,
2418
- "step": 344
2419
- },
2420
- {
2421
- "epoch": 1.6829268292682928,
2422
- "grad_norm": 1.6939678192138672,
2423
- "learning_rate": 4.660579931757543e-06,
2424
- "loss": 0.3561,
2425
- "step": 345
2426
- },
2427
- {
2428
- "epoch": 1.6878048780487804,
2429
- "grad_norm": 1.1976394653320312,
2430
- "learning_rate": 4.6586499382113985e-06,
2431
- "loss": 0.405,
2432
- "step": 346
2433
- },
2434
- {
2435
- "epoch": 1.6926829268292682,
2436
- "grad_norm": 1.6529325246810913,
2437
- "learning_rate": 4.6567148750598375e-06,
2438
- "loss": 0.6813,
2439
- "step": 347
2440
- },
2441
- {
2442
- "epoch": 1.697560975609756,
2443
- "grad_norm": 2.042006492614746,
2444
- "learning_rate": 4.6547747468473705e-06,
2445
- "loss": 0.7769,
2446
- "step": 348
2447
- },
2448
- {
2449
- "epoch": 1.7024390243902439,
2450
- "grad_norm": 1.7209240198135376,
2451
- "learning_rate": 4.652829558130404e-06,
2452
- "loss": 0.3444,
2453
- "step": 349
2454
- },
2455
- {
2456
- "epoch": 1.7073170731707317,
2457
- "grad_norm": 1.9713730812072754,
2458
- "learning_rate": 4.6508793134772265e-06,
2459
- "loss": 0.4758,
2460
- "step": 350
2461
- },
2462
- {
2463
- "epoch": 1.7121951219512195,
2464
- "grad_norm": 1.1846798658370972,
2465
- "learning_rate": 4.648924017468003e-06,
2466
- "loss": 0.3516,
2467
- "step": 351
2468
- },
2469
- {
2470
- "epoch": 1.7170731707317073,
2471
- "grad_norm": 1.9596673250198364,
2472
- "learning_rate": 4.646963674694761e-06,
2473
- "loss": 0.6718,
2474
- "step": 352
2475
- },
2476
- {
2477
- "epoch": 1.721951219512195,
2478
- "grad_norm": 1.368725061416626,
2479
- "learning_rate": 4.64499828976138e-06,
2480
- "loss": 0.2733,
2481
- "step": 353
2482
- },
2483
- {
2484
- "epoch": 1.726829268292683,
2485
- "grad_norm": 1.7530791759490967,
2486
- "learning_rate": 4.64302786728358e-06,
2487
- "loss": 0.384,
2488
- "step": 354
2489
- },
2490
- {
2491
- "epoch": 1.7317073170731707,
2492
- "grad_norm": 1.4671913385391235,
2493
- "learning_rate": 4.641052411888913e-06,
2494
- "loss": 0.4127,
2495
- "step": 355
2496
- },
2497
- {
2498
- "epoch": 1.7365853658536585,
2499
- "grad_norm": 2.0249969959259033,
2500
- "learning_rate": 4.6390719282167515e-06,
2501
- "loss": 0.3638,
2502
- "step": 356
2503
- },
2504
- {
2505
- "epoch": 1.7414634146341463,
2506
- "grad_norm": 1.5483126640319824,
2507
- "learning_rate": 4.637086420918276e-06,
2508
- "loss": 0.6348,
2509
- "step": 357
2510
- },
2511
- {
2512
- "epoch": 1.7463414634146341,
2513
- "grad_norm": 1.5062689781188965,
2514
- "learning_rate": 4.635095894656465e-06,
2515
- "loss": 0.5401,
2516
- "step": 358
2517
- },
2518
- {
2519
- "epoch": 1.751219512195122,
2520
- "grad_norm": 1.1978603601455688,
2521
- "learning_rate": 4.633100354106085e-06,
2522
- "loss": 0.2961,
2523
- "step": 359
2524
- },
2525
- {
2526
- "epoch": 1.7560975609756098,
2527
- "grad_norm": 1.768799901008606,
2528
- "learning_rate": 4.631099803953677e-06,
2529
- "loss": 0.7047,
2530
- "step": 360
2531
- },
2532
- {
2533
- "epoch": 1.7609756097560976,
2534
- "grad_norm": 1.4491766691207886,
2535
- "learning_rate": 4.629094248897546e-06,
2536
- "loss": 0.3413,
2537
- "step": 361
2538
- },
2539
- {
2540
- "epoch": 1.7658536585365854,
2541
- "grad_norm": 1.5324903726577759,
2542
- "learning_rate": 4.627083693647757e-06,
2543
- "loss": 0.3901,
2544
- "step": 362
2545
- },
2546
- {
2547
- "epoch": 1.7707317073170732,
2548
- "grad_norm": 1.591989517211914,
2549
- "learning_rate": 4.625068142926111e-06,
2550
- "loss": 0.6385,
2551
- "step": 363
2552
- },
2553
- {
2554
- "epoch": 1.775609756097561,
2555
- "grad_norm": 1.3414136171340942,
2556
- "learning_rate": 4.623047601466144e-06,
2557
- "loss": 0.4892,
2558
- "step": 364
2559
- },
2560
- {
2561
- "epoch": 1.7804878048780488,
2562
- "grad_norm": 1.4184423685073853,
2563
- "learning_rate": 4.621022074013114e-06,
2564
- "loss": 0.458,
2565
- "step": 365
2566
- },
2567
- {
2568
- "epoch": 1.7853658536585366,
2569
- "grad_norm": 1.8010565042495728,
2570
- "learning_rate": 4.618991565323987e-06,
2571
- "loss": 0.4202,
2572
- "step": 366
2573
- },
2574
- {
2575
- "epoch": 1.7902439024390244,
2576
- "grad_norm": 1.34525728225708,
2577
- "learning_rate": 4.616956080167426e-06,
2578
- "loss": 0.4154,
2579
- "step": 367
2580
- },
2581
- {
2582
- "epoch": 1.7951219512195122,
2583
- "grad_norm": 1.8559147119522095,
2584
- "learning_rate": 4.614915623323786e-06,
2585
- "loss": 0.7751,
2586
- "step": 368
2587
- },
2588
- {
2589
- "epoch": 1.8,
2590
- "grad_norm": 1.462708830833435,
2591
- "learning_rate": 4.612870199585092e-06,
2592
- "loss": 0.3168,
2593
- "step": 369
2594
- },
2595
- {
2596
- "epoch": 1.8048780487804879,
2597
- "grad_norm": 1.8187520503997803,
2598
- "learning_rate": 4.610819813755038e-06,
2599
- "loss": 0.4109,
2600
- "step": 370
2601
- },
2602
- {
2603
- "epoch": 1.8097560975609757,
2604
- "grad_norm": 1.5606852769851685,
2605
- "learning_rate": 4.608764470648971e-06,
2606
- "loss": 0.3183,
2607
- "step": 371
2608
- },
2609
- {
2610
- "epoch": 1.8146341463414632,
2611
- "grad_norm": 1.7732083797454834,
2612
- "learning_rate": 4.606704175093879e-06,
2613
- "loss": 0.3698,
2614
- "step": 372
2615
- },
2616
- {
2617
- "epoch": 1.819512195121951,
2618
- "grad_norm": 1.7014015913009644,
2619
- "learning_rate": 4.604638931928383e-06,
2620
- "loss": 0.6874,
2621
- "step": 373
2622
- },
2623
- {
2624
- "epoch": 1.8243902439024389,
2625
- "grad_norm": 1.4034913778305054,
2626
- "learning_rate": 4.602568746002718e-06,
2627
- "loss": 0.3915,
2628
- "step": 374
2629
- },
2630
- {
2631
- "epoch": 1.8292682926829267,
2632
- "grad_norm": 1.9640151262283325,
2633
- "learning_rate": 4.600493622178734e-06,
2634
- "loss": 0.7138,
2635
- "step": 375
2636
- },
2637
- {
2638
- "epoch": 1.8341463414634145,
2639
- "grad_norm": 1.7484368085861206,
2640
- "learning_rate": 4.598413565329876e-06,
2641
- "loss": 0.4021,
2642
- "step": 376
2643
- },
2644
- {
2645
- "epoch": 1.8390243902439023,
2646
- "grad_norm": 1.303987979888916,
2647
- "learning_rate": 4.596328580341169e-06,
2648
- "loss": 0.435,
2649
- "step": 377
2650
- },
2651
- {
2652
- "epoch": 1.84390243902439,
2653
- "grad_norm": 1.7868525981903076,
2654
- "learning_rate": 4.5942386721092195e-06,
2655
- "loss": 0.5827,
2656
- "step": 378
2657
- },
2658
- {
2659
- "epoch": 1.848780487804878,
2660
- "grad_norm": 1.9132968187332153,
2661
- "learning_rate": 4.592143845542189e-06,
2662
- "loss": 0.5401,
2663
- "step": 379
2664
- },
2665
- {
2666
- "epoch": 1.8536585365853657,
2667
- "grad_norm": 1.8668831586837769,
2668
- "learning_rate": 4.590044105559797e-06,
2669
- "loss": 0.7394,
2670
- "step": 380
2671
- },
2672
- {
2673
- "epoch": 1.8585365853658535,
2674
- "grad_norm": 1.2784031629562378,
2675
- "learning_rate": 4.587939457093296e-06,
2676
- "loss": 0.378,
2677
- "step": 381
2678
- },
2679
- {
2680
- "epoch": 1.8634146341463413,
2681
- "grad_norm": 2.4104111194610596,
2682
- "learning_rate": 4.585829905085468e-06,
2683
- "loss": 0.5195,
2684
- "step": 382
2685
- },
2686
- {
2687
- "epoch": 1.8682926829268292,
2688
- "grad_norm": 1.7685518264770508,
2689
- "learning_rate": 4.5837154544906135e-06,
2690
- "loss": 0.649,
2691
- "step": 383
2692
- },
2693
- {
2694
- "epoch": 1.873170731707317,
2695
- "grad_norm": 1.6093931198120117,
2696
- "learning_rate": 4.581596110274535e-06,
2697
- "loss": 0.529,
2698
- "step": 384
2699
- },
2700
- {
2701
- "epoch": 1.8780487804878048,
2702
- "grad_norm": 1.6542202234268188,
2703
- "learning_rate": 4.579471877414527e-06,
2704
- "loss": 0.8192,
2705
- "step": 385
2706
- },
2707
- {
2708
- "epoch": 1.8829268292682926,
2709
- "grad_norm": 2.2171826362609863,
2710
- "learning_rate": 4.577342760899368e-06,
2711
- "loss": 0.5918,
2712
- "step": 386
2713
- },
2714
- {
2715
- "epoch": 1.8878048780487804,
2716
- "grad_norm": 1.5181479454040527,
2717
- "learning_rate": 4.575208765729302e-06,
2718
- "loss": 0.4064,
2719
- "step": 387
2720
- },
2721
- {
2722
- "epoch": 1.8926829268292682,
2723
- "grad_norm": 1.5646779537200928,
2724
- "learning_rate": 4.573069896916035e-06,
2725
- "loss": 0.6088,
2726
- "step": 388
2727
- },
2728
- {
2729
- "epoch": 1.897560975609756,
2730
- "grad_norm": 1.4816564321517944,
2731
- "learning_rate": 4.5709261594827125e-06,
2732
- "loss": 0.4965,
2733
- "step": 389
2734
- },
2735
- {
2736
- "epoch": 1.9024390243902438,
2737
- "grad_norm": 1.4817956686019897,
2738
- "learning_rate": 4.568777558463922e-06,
2739
- "loss": 0.3348,
2740
- "step": 390
2741
- },
2742
- {
2743
- "epoch": 1.9073170731707316,
2744
- "grad_norm": 1.482393503189087,
2745
- "learning_rate": 4.566624098905665e-06,
2746
- "loss": 0.5659,
2747
- "step": 391
2748
- },
2749
- {
2750
- "epoch": 1.9121951219512194,
2751
- "grad_norm": 1.7307995557785034,
2752
- "learning_rate": 4.564465785865359e-06,
2753
- "loss": 0.4202,
2754
- "step": 392
2755
- },
2756
- {
2757
- "epoch": 1.9170731707317072,
2758
- "grad_norm": 1.3334952592849731,
2759
- "learning_rate": 4.56230262441182e-06,
2760
- "loss": 0.3028,
2761
- "step": 393
2762
- },
2763
- {
2764
- "epoch": 1.921951219512195,
2765
- "grad_norm": 1.3310606479644775,
2766
- "learning_rate": 4.560134619625247e-06,
2767
- "loss": 0.4366,
2768
- "step": 394
2769
- },
2770
- {
2771
- "epoch": 1.9268292682926829,
2772
- "grad_norm": 1.4220263957977295,
2773
- "learning_rate": 4.5579617765972155e-06,
2774
- "loss": 0.4635,
2775
- "step": 395
2776
- },
2777
- {
2778
- "epoch": 1.9317073170731707,
2779
- "grad_norm": 1.6790047883987427,
2780
- "learning_rate": 4.555784100430662e-06,
2781
- "loss": 0.4049,
2782
- "step": 396
2783
- },
2784
- {
2785
- "epoch": 1.9365853658536585,
2786
- "grad_norm": 1.489020824432373,
2787
- "learning_rate": 4.553601596239877e-06,
2788
- "loss": 0.3564,
2789
- "step": 397
2790
- },
2791
- {
2792
- "epoch": 1.9414634146341463,
2793
- "grad_norm": 1.3163279294967651,
2794
- "learning_rate": 4.551414269150489e-06,
2795
- "loss": 0.3862,
2796
- "step": 398
2797
- },
2798
- {
2799
- "epoch": 1.946341463414634,
2800
- "grad_norm": 1.4149785041809082,
2801
- "learning_rate": 4.54922212429945e-06,
2802
- "loss": 0.3606,
2803
- "step": 399
2804
- },
2805
- {
2806
- "epoch": 1.951219512195122,
2807
- "grad_norm": 1.5937131643295288,
2808
- "learning_rate": 4.547025166835027e-06,
2809
- "loss": 0.506,
2810
- "step": 400
2811
- },
2812
- {
2813
- "epoch": 1.9560975609756097,
2814
- "grad_norm": 1.6854530572891235,
2815
- "learning_rate": 4.544823401916794e-06,
2816
- "loss": 0.6974,
2817
- "step": 401
2818
- },
2819
- {
2820
- "epoch": 1.9609756097560975,
2821
- "grad_norm": 1.5636667013168335,
2822
- "learning_rate": 4.542616834715612e-06,
2823
- "loss": 0.4172,
2824
- "step": 402
2825
- },
2826
- {
2827
- "epoch": 1.9658536585365853,
2828
- "grad_norm": 1.484148383140564,
2829
- "learning_rate": 4.540405470413618e-06,
2830
- "loss": 0.3189,
2831
- "step": 403
2832
- },
2833
- {
2834
- "epoch": 1.9707317073170731,
2835
- "grad_norm": 1.4421648979187012,
2836
- "learning_rate": 4.53818931420422e-06,
2837
- "loss": 0.5316,
2838
- "step": 404
2839
- },
2840
- {
2841
- "epoch": 1.975609756097561,
2842
- "grad_norm": 1.4186997413635254,
2843
- "learning_rate": 4.535968371292076e-06,
2844
- "loss": 0.4661,
2845
- "step": 405
2846
- },
2847
- {
2848
- "epoch": 1.9804878048780488,
2849
- "grad_norm": 1.2781983613967896,
2850
- "learning_rate": 4.533742646893086e-06,
2851
- "loss": 0.4689,
2852
- "step": 406
2853
- },
2854
- {
2855
- "epoch": 1.9853658536585366,
2856
- "grad_norm": 1.588996171951294,
2857
- "learning_rate": 4.531512146234383e-06,
2858
- "loss": 0.5309,
2859
- "step": 407
2860
- },
2861
- {
2862
- "epoch": 1.9902439024390244,
2863
- "grad_norm": 1.7258890867233276,
2864
- "learning_rate": 4.529276874554312e-06,
2865
- "loss": 0.7082,
2866
- "step": 408
2867
- },
2868
- {
2869
- "epoch": 1.9951219512195122,
2870
- "grad_norm": 1.4261395931243896,
2871
- "learning_rate": 4.527036837102426e-06,
2872
- "loss": 0.3534,
2873
- "step": 409
2874
- },
2875
- {
2876
- "epoch": 2.0,
2877
- "grad_norm": 1.835606575012207,
2878
- "learning_rate": 4.524792039139471e-06,
2879
- "loss": 0.5411,
2880
- "step": 410
2881
- }
2882
- ],
2883
- "logging_steps": 1,
2884
- "max_steps": 2050,
2885
- "num_input_tokens_seen": 0,
2886
- "num_train_epochs": 10,
2887
- "save_steps": 208,
2888
- "stateful_callbacks": {
2889
- "TrainerControl": {
2890
- "args": {
2891
- "should_epoch_stop": false,
2892
- "should_evaluate": false,
2893
- "should_log": false,
2894
- "should_save": true,
2895
- "should_training_stop": false
2896
- },
2897
- "attributes": {}
2898
- }
2899
- },
2900
- "total_flos": 1.2119917182163354e+17,
2901
- "train_batch_size": 1,
2902
- "trial_name": null,
2903
- "trial_params": null
2904
- }
 
qwen2_5_7b_instruct/limo/checkpoint-410/vocab.json DELETED
The diff for this file is too large to render. See raw diff
 
qwen2_5_7b_instruct/limo/checkpoint-615/added_tokens.json DELETED
@@ -1,24 +0,0 @@
- {
- "</tool_call>": 151658,
- "<tool_call>": 151657,
- "<|box_end|>": 151649,
- "<|box_start|>": 151648,
- "<|endoftext|>": 151643,
- "<|file_sep|>": 151664,
- "<|fim_middle|>": 151660,
- "<|fim_pad|>": 151662,
- "<|fim_prefix|>": 151659,
- "<|fim_suffix|>": 151661,
- "<|im_end|>": 151645,
- "<|im_start|>": 151644,
- "<|image_pad|>": 151655,
- "<|object_ref_end|>": 151647,
- "<|object_ref_start|>": 151646,
- "<|quad_end|>": 151651,
- "<|quad_start|>": 151650,
- "<|repo_name|>": 151663,
- "<|video_pad|>": 151656,
- "<|vision_end|>": 151653,
- "<|vision_pad|>": 151654,
- "<|vision_start|>": 151652
- }
 
qwen2_5_7b_instruct/limo/checkpoint-615/chat_template.jinja DELETED
@@ -1,54 +0,0 @@
- {%- if tools %}
- {{- '<|im_start|>system\n' }}
- {%- if messages[0]['role'] == 'system' %}
- {{- messages[0]['content'] }}
- {%- else %}
- {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
- {%- endif %}
- {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
- {%- for tool in tools %}
- {{- "\n" }}
- {{- tool | tojson }}
- {%- endfor %}
- {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
- {%- else %}
- {%- if messages[0]['role'] == 'system' %}
- {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
- {%- else %}
- {{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }}
- {%- endif %}
- {%- endif %}
- {%- for message in messages %}
- {%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
- {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
- {%- elif message.role == "assistant" %}
- {{- '<|im_start|>' + message.role }}
- {%- if message.content %}
- {{- '\n' + message.content }}
- {%- endif %}
- {%- for tool_call in message.tool_calls %}
- {%- if tool_call.function is defined %}
- {%- set tool_call = tool_call.function %}
- {%- endif %}
- {{- '\n<tool_call>\n{"name": "' }}
- {{- tool_call.name }}
- {{- '", "arguments": ' }}
- {{- tool_call.arguments | tojson }}
- {{- '}\n</tool_call>' }}
- {%- endfor %}
- {{- '<|im_end|>\n' }}
- {%- elif message.role == "tool" %}
- {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
- {{- '<|im_start|>user' }}
- {%- endif %}
- {{- '\n<tool_response>\n' }}
- {{- message.content }}
- {{- '\n</tool_response>' }}
- {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
- {{- '<|im_end|>\n' }}
- {%- endif %}
- {%- endif %}
- {%- endfor %}
- {%- if add_generation_prompt %}
- {{- '<|im_start|>assistant\n' }}
- {%- endif %}
 
qwen2_5_7b_instruct/limo/checkpoint-615/config.json DELETED
@@ -1,58 +0,0 @@
- {
- "architectures": [
- "Qwen2ForCausalLM"
- ],
- "attention_dropout": 0.0,
- "bos_token_id": 151643,
- "eos_token_id": 151645,
- "hidden_act": "silu",
- "hidden_size": 3584,
- "initializer_range": 0.02,
- "intermediate_size": 18944,
- "layer_types": [
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention"
- ],
- "max_position_embeddings": 32768,
- "max_window_layers": 28,
- "model_type": "qwen2",
- "num_attention_heads": 28,
- "num_hidden_layers": 28,
- "num_key_value_heads": 4,
- "rms_norm_eps": 1e-06,
- "rope_scaling": null,
- "rope_theta": 1000000.0,
- "sliding_window": null,
- "tie_word_embeddings": false,
- "torch_dtype": "float32",
- "transformers_version": "4.55.0",
- "use_cache": false,
- "use_sliding_window": false,
- "vocab_size": 152064
- }
 
qwen2_5_7b_instruct/limo/checkpoint-615/generation_config.json DELETED
@@ -1,7 +0,0 @@
- {
- "_from_model_config": true,
- "bos_token_id": 151643,
- "eos_token_id": 151645,
- "transformers_version": "4.55.0",
- "use_cache": false
- }