arynkiewicz committed
Commit 54198be · verified · 1 parent: 78a027f

Model save
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,58 @@
+---
+base_model: Qwen/Qwen3-4B-Base
+library_name: transformers
+model_name: ner-on-types
+tags:
+- generated_from_trainer
+- sft
+- trl
+licence: license
+---
+
+# Model Card for ner-on-types
+
+This model is a fine-tuned version of [Qwen/Qwen3-4B-Base](https://huggingface.co/Qwen/Qwen3-4B-Base).
+It has been trained using [TRL](https://github.com/huggingface/trl).
+
+## Quick start
+
+```python
+from transformers import pipeline
+
+question = "If you had a time machine, but could only go to the past or the future once and never return, which would you choose and why?"
+generator = pipeline("text-generation", model="arynkiewicz/ner-on-types", device="cuda")
+output = generator([{"role": "user", "content": question}], max_new_tokens=128, return_full_text=False)[0]
+print(output["generated_text"])
+```
+
+## Training procedure
+
+
+
+
+
+This model was trained with SFT.
+
+### Framework versions
+
+- TRL: 0.23.0
+- Transformers: 4.56.1
+- Pytorch: 2.6.0
+- Datasets: 4.0.0
+- Tokenizers: 0.22.0
+
+## Citations
+
+
+
+Cite TRL as:
+
+```bibtex
+@misc{vonwerra2022trl,
+    title = {{TRL: Transformer Reinforcement Learning}},
+    author = {Leandro von Werra and Younes Belkada and Lewis Tunstall and Edward Beeching and Tristan Thrush and Nathan Lambert and Shengyi Huang and Kashif Rasul and Quentin Gallou{\'e}dec},
+    year = 2020,
+    journal = {GitHub repository},
+    publisher = {GitHub},
+    howpublished = {\url{https://github.com/huggingface/trl}}
+}
+```
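The card documents the result but not the training entry point. For orientation, here is a minimal TRL SFT sketch consistent with the framework versions listed above; the dataset file, its format, and all hyperparameters are illustrative assumptions, not the actual training configuration:

```python
# Minimal TRL SFT sketch (assumptions marked inline); not the author's script.
from datasets import load_dataset
from trl import SFTConfig, SFTTrainer

# Hypothetical data file; the real NER SFT dataset is not part of this commit.
dataset = load_dataset("json", data_files="ner_sft_data.jsonl", split="train")

config = SFTConfig(
    output_dir="ner-on-types",
    num_train_epochs=3,       # matches "epoch": 3.0 in train_results.json
    bf16=True,                # matches "dtype": "bfloat16" in config.json
    report_to="tensorboard",  # a tfevents log file is part of this commit
)

trainer = SFTTrainer(
    model="Qwen/Qwen3-4B-Base",  # base model named in the card
    args=config,
    train_dataset=dataset,
)
trainer.train()
```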
added_tokens.json ADDED
@@ -0,0 +1,28 @@
+{
+  "</think>": 151668,
+  "</tool_call>": 151658,
+  "</tool_response>": 151666,
+  "<think>": 151667,
+  "<tool_call>": 151657,
+  "<tool_response>": 151665,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
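These are the special and extension token ids layered on top of the base Qwen vocabulary. A quick sanity check after loading the tokenizer (a sketch, assuming the hub id from the model card):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("arynkiewicz/ner-on-types")
# Ids taken from added_tokens.json / config.json above.
assert tok.convert_tokens_to_ids("<|im_end|>") == 151645     # eos_token_id
assert tok.convert_tokens_to_ids("<|endoftext|>") == 151643  # pad_token_id
```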
all_results.json ADDED
@@ -0,0 +1,10 @@
+{
+  "epoch": 3.0,
+  "num_tokens": 565489014.0,
+  "total_flos": 722584728633344.0,
+  "train_loss": 1.0774097926684294,
+  "train_runtime": 15585.9875,
+  "train_samples": 100000,
+  "train_samples_per_second": 19.248,
+  "train_steps_per_second": 0.301
+}
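The metrics are internally consistent: 100,000 training samples over 3 epochs in roughly 15,586 seconds gives the reported throughput. A one-line check:

```python
# 100000 train_samples x 3.0 epochs / 15585.9875 s train_runtime
print(100_000 * 3.0 / 15585.9875)  # ~19.248, matching train_samples_per_second
```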
chat_template.jinja ADDED
@@ -0,0 +1,15 @@
+{% for message in messages %}
+{% if message['role'] == 'user' %}
+{{ '<|user|>
+' + message['content'] + eos_token }}
+{% elif message['role'] == 'system' %}
+{{ '<|system|>
+' + message['content'] + eos_token }}
+{% elif message['role'] == 'assistant' %}
+{{ '<|assistant|>
+' + message['content'] + eos_token }}
+{% endif %}
+{% if loop.last and add_generation_prompt %}
+{{ '<|assistant|>' }}
+{% endif %}
+{% endfor %}
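Note that this template wraps each turn in `<|user|>`/`<|system|>`/`<|assistant|>` markers followed by the tokenizer's eos token (`<|im_end|>`, per tokenizer_config.json), rather than Qwen's stock `<|im_start|>` format. A minimal sketch of rendering a prompt with it (the example message is illustrative):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("arynkiewicz/ner-on-types")
text = tok.apply_chat_template(
    [{"role": "user", "content": "Extract entities: Apple opened a store in Berlin."}],
    tokenize=False,
    add_generation_prompt=True,
)
# Roughly: '<|user|>\nExtract entities: ...<|im_end|>' plus the
# trailing '<|assistant|>' generation prompt.
print(text)
```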
config.json ADDED
@@ -0,0 +1,68 @@
+{
+  "architectures": [
+    "Qwen3ForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "dtype": "bfloat16",
+  "eos_token_id": 151645,
+  "head_dim": 128,
+  "hidden_act": "silu",
+  "hidden_size": 2560,
+  "initializer_range": 0.02,
+  "intermediate_size": 9728,
+  "layer_types": [
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention"
+  ],
+  "max_position_embeddings": 32768,
+  "max_window_layers": 36,
+  "model_type": "qwen3",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 36,
+  "num_key_value_heads": 8,
+  "pad_token_id": 151643,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000,
+  "sliding_window": null,
+  "tie_word_embeddings": true,
+  "transformers_version": "4.56.1",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "vocab_size": 151936
+}
generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "eos_token_id": 151645,
+  "max_new_tokens": 2048,
+  "pad_token_id": 151643,
+  "transformers_version": "4.56.1"
+}
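These defaults are picked up automatically at generation time; they can also be inspected or overridden explicitly (a sketch, assuming the hub id from the model card):

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("arynkiewicz/ner-on-types")
print(gen_cfg.eos_token_id, gen_cfg.pad_token_id, gen_cfg.max_new_tokens)
# 151645 151643 2048
```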
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6da26cb28fc67a154fe691456101add837ead7e5e082784ec2e69a15da92fe1c
+size 4967215360
model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c55e5dbce9c82184f6070fb166200ba567858605001c4c6a735317bb0b98e445
+size 3077766632
model.safetensors.index.json ADDED
@@ -0,0 +1,406 @@
+{
+  "metadata": {
+    "total_parameters": 196096,
+    "total_size": 8044936192
+  },
+  "weight_map": {
+    "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.20.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.21.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.21.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.22.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.22.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.24.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.24.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.25.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.25.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.26.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.26.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.27.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.27.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.28.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.28.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.29.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.29.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.30.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.30.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.31.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.31.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.32.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.32.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.32.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.32.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.32.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.32.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.32.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.32.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.32.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.32.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.32.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.33.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.33.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.33.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.33.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.33.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.33.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.33.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.33.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.33.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.33.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.33.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.34.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.34.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.34.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.34.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.34.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.34.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.34.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.34.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.34.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.34.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.34.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.35.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.35.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.35.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.35.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.35.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.35.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.35.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.35.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.35.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.35.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.35.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.norm.weight": "model-00002-of-00002.safetensors"
+  }
+}
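`from_pretrained` resolves every tensor name through this index to the right shard. The mapping can also be read directly, e.g. to pull a single shard without loading the whole model (a sketch, assuming the files are present locally):

```python
import json
from safetensors.torch import load_file

with open("model.safetensors.index.json") as f:
    index = json.load(f)

# Which shard holds the embedding matrix?
shard = index["weight_map"]["model.embed_tokens.weight"]  # model-00001-of-00002.safetensors
tensors = load_file(shard)                                # loads only that shard
print(tensors["model.embed_tokens.weight"].shape)
```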
runs/Dec03_23-16-28_gpu22/events.out.tfevents.1764800263.gpu22.3221594.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:601f369eafa43d6da496620b997900d8f6400d0e76117731e8046dd20318497a
+size 131666
special_tokens_map.json ADDED
@@ -0,0 +1,25 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": "<|im_end|>",
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
+size 11422654
tokenizer_config.json ADDED
@@ -0,0 +1,239 @@
+{
+  "add_bos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151645": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151646": {
+      "content": "<|object_ref_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151647": {
+      "content": "<|object_ref_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151648": {
+      "content": "<|box_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151649": {
+      "content": "<|box_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151650": {
+      "content": "<|quad_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151651": {
+      "content": "<|quad_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151652": {
+      "content": "<|vision_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151653": {
+      "content": "<|vision_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151654": {
+      "content": "<|vision_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151655": {
+      "content": "<|image_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151656": {
+      "content": "<|video_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151657": {
+      "content": "<tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151658": {
+      "content": "</tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151659": {
+      "content": "<|fim_prefix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151660": {
+      "content": "<|fim_middle|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151661": {
+      "content": "<|fim_suffix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151662": {
+      "content": "<|fim_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151663": {
+      "content": "<|repo_name|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151664": {
+      "content": "<|file_sep|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151665": {
+      "content": "<tool_response>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151666": {
+      "content": "</tool_response>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151667": {
+      "content": "<think>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151668": {
+      "content": "</think>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "bos_token": null,
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "errors": "replace",
+  "extra_special_tokens": {},
+  "model_max_length": 131072,
+  "pad_token": "<|endoftext|>",
+  "split_special_tokens": false,
+  "tokenizer_class": "Qwen2Tokenizer",
+  "unk_token": null
+}
train_results.json ADDED
@@ -0,0 +1,10 @@
+{
+  "epoch": 3.0,
+  "num_tokens": 565489014.0,
+  "total_flos": 722584728633344.0,
+  "train_loss": 1.0774097926684294,
+  "train_runtime": 15585.9875,
+  "train_samples": 100000,
+  "train_samples_per_second": 19.248,
+  "train_steps_per_second": 0.301
+}
trainer_state.json ADDED
@@ -0,0 +1,3788 @@
+{
+  "best_global_step": null,
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 3.0,
+  "eval_steps": 500,
+  "global_step": 4689,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.0064,
+      "grad_norm": 2.760316424583359,
+      "learning_rate": 3.1914893617021275e-07,
+      "loss": 1.6571,
+      "num_tokens": 1208276.0,
+      "step": 10
+    },
+    {
+      "epoch": 0.0128,
+      "grad_norm": 1.9639395470560352,
+      "learning_rate": 6.73758865248227e-07,
+      "loss": 1.6372,
+      "num_tokens": 2410446.0,
+      "step": 20
+    },
+    {
+      "epoch": 0.0192,
+      "grad_norm": 1.6857950160814903,
+      "learning_rate": 1.0283687943262412e-06,
+      "loss": 1.6138,
+      "num_tokens": 3622536.0,
+      "step": 30
+    },
+    {
+      "epoch": 0.0256,
+      "grad_norm": 1.9355649405079267,
+      "learning_rate": 1.3829787234042555e-06,
+      "loss": 1.554,
+      "num_tokens": 4837847.0,
+      "step": 40
+    },
+    {
+      "epoch": 0.032,
+      "grad_norm": 1.0134479979543427,
+      "learning_rate": 1.7375886524822697e-06,
+      "loss": 1.5138,
+      "num_tokens": 6044886.0,
+      "step": 50
+    },
+    {
+      "epoch": 0.0384,
+      "grad_norm": 0.7097712225560386,
+      "learning_rate": 2.092198581560284e-06,
+      "loss": 1.4577,
+      "num_tokens": 7255346.0,
+      "step": 60
+    },
+    {
+      "epoch": 0.0448,
+      "grad_norm": 0.7563602572113316,
+      "learning_rate": 2.446808510638298e-06,
+      "loss": 1.4239,
+      "num_tokens": 8465627.0,
+      "step": 70
+    },
+    {
+      "epoch": 0.0512,
+      "grad_norm": 0.6411265148411116,
+      "learning_rate": 2.8014184397163125e-06,
+      "loss": 1.3857,
+      "num_tokens": 9667266.0,
+      "step": 80
+    },
+    {
+      "epoch": 0.0576,
+      "grad_norm": 0.7071256376230877,
+      "learning_rate": 3.1560283687943267e-06,
+      "loss": 1.3736,
+      "num_tokens": 10869831.0,
+      "step": 90
+    },
+    {
+      "epoch": 0.064,
+      "grad_norm": 0.7623180191305359,
+      "learning_rate": 3.510638297872341e-06,
+      "loss": 1.3722,
+      "num_tokens": 12083093.0,
+      "step": 100
+    },
+    {
+      "epoch": 0.0704,
+      "grad_norm": 0.651385333897087,
+      "learning_rate": 3.865248226950355e-06,
+      "loss": 1.3468,
+      "num_tokens": 13290331.0,
+      "step": 110
+    },
+    {
+      "epoch": 0.0768,
+      "grad_norm": 0.8706225351642094,
+      "learning_rate": 4.219858156028369e-06,
+      "loss": 1.3387,
+      "num_tokens": 14488386.0,
+      "step": 120
+    },
+    {
+      "epoch": 0.0832,
+      "grad_norm": 0.84726755662717,
+      "learning_rate": 4.574468085106383e-06,
+      "loss": 1.3364,
+      "num_tokens": 15690608.0,
+      "step": 130
+    },
+    {
+      "epoch": 0.0896,
+      "grad_norm": 0.8553144960607314,
+      "learning_rate": 4.929078014184397e-06,
+      "loss": 1.3207,
+      "num_tokens": 16894120.0,
+      "step": 140
+    },
+    {
+      "epoch": 0.096,
+      "grad_norm": 0.6845288880044453,
+      "learning_rate": 4.999961827753897e-06,
+      "loss": 1.3072,
+      "num_tokens": 18098866.0,
+      "step": 150
+    },
+    {
+      "epoch": 0.1024,
+      "grad_norm": 0.7060413425833653,
+      "learning_rate": 4.999806755001946e-06,
+      "loss": 1.293,
+      "num_tokens": 19317515.0,
+      "step": 160
+    },
+    {
+      "epoch": 0.1088,
+      "grad_norm": 1.112301905134234,
+      "learning_rate": 4.999532403372408e-06,
+      "loss": 1.2933,
+      "num_tokens": 20523986.0,
+      "step": 170
+    },
+    {
+      "epoch": 0.1152,
+      "grad_norm": 0.7057273926728088,
+      "learning_rate": 4.9991387859560365e-06,
+      "loss": 1.3105,
+      "num_tokens": 21730204.0,
+      "step": 180
+    },
+    {
+      "epoch": 0.1216,
+      "grad_norm": 0.7046621457199816,
+      "learning_rate": 4.9986259215343814e-06,
+      "loss": 1.3036,
+      "num_tokens": 22941629.0,
+      "step": 190
+    },
+    {
+      "epoch": 0.128,
+      "grad_norm": 0.6753839003505228,
+      "learning_rate": 4.997993834578891e-06,
+      "loss": 1.2837,
+      "num_tokens": 24149743.0,
+      "step": 200
+    },
+    {
+      "epoch": 0.1344,
+      "grad_norm": 0.6833117540920727,
+      "learning_rate": 4.997242555249746e-06,
+      "loss": 1.2798,
+      "num_tokens": 25350421.
178
+ "step": 210
179
+ },
180
+ {
181
+ "epoch": 0.1408,
182
+ "grad_norm": 0.7496565711502305,
183
+ "learning_rate": 4.996372119394418e-06,
184
+ "loss": 1.2872,
185
+ "num_tokens": 26553851.0,
186
+ "step": 220
187
+ },
188
+ {
189
+ "epoch": 0.1472,
190
+ "grad_norm": 0.8257784450438341,
191
+ "learning_rate": 4.9953825685459635e-06,
192
+ "loss": 1.2715,
193
+ "num_tokens": 27756494.0,
194
+ "step": 230
195
+ },
196
+ {
197
+ "epoch": 0.1536,
198
+ "grad_norm": 0.8586750458312551,
199
+ "learning_rate": 4.994273949921038e-06,
200
+ "loss": 1.273,
201
+ "num_tokens": 28966311.0,
202
+ "step": 240
203
+ },
204
+ {
205
+ "epoch": 0.16,
206
+ "grad_norm": 0.8942167127143708,
207
+ "learning_rate": 4.993046316417643e-06,
208
+ "loss": 1.2615,
209
+ "num_tokens": 30165165.0,
210
+ "step": 250
211
+ },
212
+ {
213
+ "epoch": 0.1664,
214
+ "grad_norm": 0.7320667303892974,
215
+ "learning_rate": 4.991699726612607e-06,
216
+ "loss": 1.2598,
217
+ "num_tokens": 31372687.0,
218
+ "step": 260
219
+ },
220
+ {
221
+ "epoch": 0.1728,
222
+ "grad_norm": 0.7759159652826615,
223
+ "learning_rate": 4.990234244758785e-06,
224
+ "loss": 1.2378,
225
+ "num_tokens": 32578240.0,
226
+ "step": 270
227
+ },
228
+ {
229
+ "epoch": 0.1792,
230
+ "grad_norm": 0.7081937298786585,
231
+ "learning_rate": 4.988649940781992e-06,
232
+ "loss": 1.2496,
233
+ "num_tokens": 33788704.0,
234
+ "step": 280
235
+ },
236
+ {
237
+ "epoch": 0.1856,
238
+ "grad_norm": 0.8354872354621143,
239
+ "learning_rate": 4.986946890277673e-06,
240
+ "loss": 1.239,
241
+ "num_tokens": 34992041.0,
242
+ "step": 290
243
+ },
244
+ {
245
+ "epoch": 0.192,
246
+ "grad_norm": 0.7419306542972816,
247
+ "learning_rate": 4.9851251745072905e-06,
248
+ "loss": 1.2334,
249
+ "num_tokens": 36202424.0,
250
+ "step": 300
251
+ },
252
+ {
253
+ "epoch": 0.1984,
254
+ "grad_norm": 0.8124424043952861,
255
+ "learning_rate": 4.983184880394447e-06,
256
+ "loss": 1.2423,
257
+ "num_tokens": 37406998.0,
258
+ "step": 310
259
+ },
260
+ {
261
+ "epoch": 0.2048,
262
+ "grad_norm": 0.9137121442594122,
263
+ "learning_rate": 4.981126100520743e-06,
264
+ "loss": 1.2398,
265
+ "num_tokens": 38614024.0,
266
+ "step": 320
267
+ },
268
+ {
269
+ "epoch": 0.2112,
270
+ "grad_norm": 0.8692171799253517,
271
+ "learning_rate": 4.978948933121351e-06,
272
+ "loss": 1.2274,
273
+ "num_tokens": 39818938.0,
274
+ "step": 330
275
+ },
276
+ {
277
+ "epoch": 0.2176,
278
+ "grad_norm": 0.7959433307352174,
279
+ "learning_rate": 4.976653482080335e-06,
280
+ "loss": 1.2432,
281
+ "num_tokens": 41029985.0,
282
+ "step": 340
283
+ },
284
+ {
285
+ "epoch": 0.224,
286
+ "grad_norm": 0.9183385731990914,
287
+ "learning_rate": 4.97423985692569e-06,
288
+ "loss": 1.2183,
289
+ "num_tokens": 42241595.0,
290
+ "step": 350
291
+ },
292
+ {
293
+ "epoch": 0.2304,
294
+ "grad_norm": 0.8800279308744207,
295
+ "learning_rate": 4.97170817282412e-06,
296
+ "loss": 1.2174,
297
+ "num_tokens": 43436994.0,
298
+ "step": 360
299
+ },
300
+ {
301
+ "epoch": 0.2368,
302
+ "grad_norm": 0.8482042891364965,
303
+ "learning_rate": 4.969058550575535e-06,
304
+ "loss": 1.214,
305
+ "num_tokens": 44649051.0,
306
+ "step": 370
307
+ },
308
+ {
309
+ "epoch": 0.2432,
310
+ "grad_norm": 0.8597854654288322,
311
+ "learning_rate": 4.966291116607297e-06,
312
+ "loss": 1.2105,
313
+ "num_tokens": 45857075.0,
314
+ "step": 380
315
+ },
316
+ {
317
+ "epoch": 0.2496,
318
+ "grad_norm": 0.8904371734549302,
319
+ "learning_rate": 4.96340600296818e-06,
320
+ "loss": 1.1976,
321
+ "num_tokens": 47059498.0,
322
+ "step": 390
323
+ },
324
+ {
325
+ "epoch": 0.256,
326
+ "grad_norm": 0.864096324906862,
327
+ "learning_rate": 4.960403347322069e-06,
328
+ "loss": 1.2067,
329
+ "num_tokens": 48273286.0,
330
+ "step": 400
331
+ },
332
+ {
333
+ "epoch": 0.2624,
334
+ "grad_norm": 0.8417001685001565,
335
+ "learning_rate": 4.957283292941401e-06,
336
+ "loss": 1.2012,
337
+ "num_tokens": 49479835.0,
338
+ "step": 410
339
+ },
340
+ {
341
+ "epoch": 0.2688,
342
+ "grad_norm": 0.8738206939182319,
343
+ "learning_rate": 4.954045988700315e-06,
344
+ "loss": 1.2081,
345
+ "num_tokens": 50692484.0,
346
+ "step": 420
347
+ },
348
+ {
349
+ "epoch": 0.2752,
350
+ "grad_norm": 0.9214341760640065,
351
+ "learning_rate": 4.9506915890675566e-06,
352
+ "loss": 1.1982,
353
+ "num_tokens": 51904151.0,
354
+ "step": 430
355
+ },
356
+ {
357
+ "epoch": 0.2816,
358
+ "grad_norm": 0.8270044046785595,
359
+ "learning_rate": 4.94722025409911e-06,
360
+ "loss": 1.2003,
361
+ "num_tokens": 53107439.0,
362
+ "step": 440
363
+ },
364
+ {
365
+ "epoch": 0.288,
366
+ "grad_norm": 0.9325298797380837,
367
+ "learning_rate": 4.943632149430552e-06,
368
+ "loss": 1.1934,
369
+ "num_tokens": 54311802.0,
370
+ "step": 450
371
+ },
372
+ {
373
+ "epoch": 0.2944,
374
+ "grad_norm": 0.8173318542721012,
375
+ "learning_rate": 4.9399274462691555e-06,
376
+ "loss": 1.183,
377
+ "num_tokens": 55516169.0,
378
+ "step": 460
379
+ },
380
+ {
381
+ "epoch": 0.3008,
382
+ "grad_norm": 0.8403372189641363,
383
+ "learning_rate": 4.93610632138572e-06,
384
+ "loss": 1.2011,
385
+ "num_tokens": 56720582.0,
386
+ "step": 470
387
+ },
388
+ {
389
+ "epoch": 0.3072,
390
+ "grad_norm": 0.9133683374494203,
391
+ "learning_rate": 4.9321689571061314e-06,
392
+ "loss": 1.1863,
393
+ "num_tokens": 57923305.0,
394
+ "step": 480
395
+ },
396
+ {
397
+ "epoch": 0.3136,
398
+ "grad_norm": 0.8342006897685076,
399
+ "learning_rate": 4.928115541302672e-06,
400
+ "loss": 1.1789,
401
+ "num_tokens": 59119131.0,
402
+ "step": 490
403
+ },
404
+ {
405
+ "epoch": 0.32,
406
+ "grad_norm": 0.9237208555707096,
407
+ "learning_rate": 4.923946267385043e-06,
408
+ "loss": 1.1823,
409
+ "num_tokens": 60323216.0,
410
+ "step": 500
411
+ },
412
+ {
413
+ "epoch": 0.3264,
414
+ "grad_norm": 1.138961215949811,
415
+ "learning_rate": 4.91966133429115e-06,
416
+ "loss": 1.1849,
417
+ "num_tokens": 61536243.0,
418
+ "step": 510
419
+ },
420
+ {
421
+ "epoch": 0.3328,
422
+ "grad_norm": 0.8179215725319021,
423
+ "learning_rate": 4.915260946477601e-06,
424
+ "loss": 1.1689,
425
+ "num_tokens": 62725558.0,
426
+ "step": 520
427
+ },
428
+ {
429
+ "epoch": 0.3392,
430
+ "grad_norm": 0.8196458509991646,
431
+ "learning_rate": 4.910745313909953e-06,
432
+ "loss": 1.1754,
433
+ "num_tokens": 63929035.0,
434
+ "step": 530
435
+ },
436
+ {
437
+ "epoch": 0.3456,
438
+ "grad_norm": 0.8606903543941481,
439
+ "learning_rate": 4.906114652052694e-06,
440
+ "loss": 1.1608,
441
+ "num_tokens": 65137799.0,
442
+ "step": 540
443
+ },
444
+ {
445
+ "epoch": 0.352,
446
+ "grad_norm": 0.842427893289404,
447
+ "learning_rate": 4.9013691818589635e-06,
448
+ "loss": 1.176,
449
+ "num_tokens": 66343119.0,
450
+ "step": 550
451
+ },
452
+ {
453
+ "epoch": 0.3584,
454
+ "grad_norm": 0.9536458222010928,
455
+ "learning_rate": 4.896509129760008e-06,
456
+ "loss": 1.1766,
457
+ "num_tokens": 67554625.0,
458
+ "step": 560
459
+ },
460
+ {
461
+ "epoch": 0.3648,
462
+ "grad_norm": 0.8456584910416223,
463
+ "learning_rate": 4.891534727654374e-06,
464
+ "loss": 1.1704,
465
+ "num_tokens": 68767553.0,
466
+ "step": 570
467
+ },
468
+ {
469
+ "epoch": 0.3712,
470
+ "grad_norm": 0.825023352714185,
471
+ "learning_rate": 4.886446212896853e-06,
472
+ "loss": 1.1662,
473
+ "num_tokens": 69977707.0,
474
+ "step": 580
475
+ },
476
+ {
477
+ "epoch": 0.3776,
478
+ "grad_norm": 0.8327520829988985,
479
+ "learning_rate": 4.881243828287141e-06,
480
+ "loss": 1.1715,
481
+ "num_tokens": 71189476.0,
482
+ "step": 590
483
+ },
484
+ {
485
+ "epoch": 0.384,
486
+ "grad_norm": 0.840077866672345,
487
+ "learning_rate": 4.875927822058265e-06,
488
+ "loss": 1.1711,
489
+ "num_tokens": 72395847.0,
490
+ "step": 600
491
+ },
492
+ {
493
+ "epoch": 0.3904,
494
+ "grad_norm": 0.8253947193633453,
495
+ "learning_rate": 4.870498447864735e-06,
496
+ "loss": 1.1439,
497
+ "num_tokens": 73594932.0,
498
+ "step": 610
499
+ },
500
+ {
501
+ "epoch": 0.3968,
502
+ "grad_norm": 0.9212419524845424,
503
+ "learning_rate": 4.864955964770442e-06,
504
+ "loss": 1.1643,
505
+ "num_tokens": 74802657.0,
506
+ "step": 620
507
+ },
508
+ {
509
+ "epoch": 0.4032,
510
+ "grad_norm": 0.9296250658068028,
511
+ "learning_rate": 4.859300637236289e-06,
512
+ "loss": 1.1534,
513
+ "num_tokens": 76011529.0,
514
+ "step": 630
515
+ },
516
+ {
517
+ "epoch": 0.4096,
518
+ "grad_norm": 1.057634627530951,
519
+ "learning_rate": 4.853532735107587e-06,
520
+ "loss": 1.1507,
521
+ "num_tokens": 77210334.0,
522
+ "step": 640
523
+ },
524
+ {
525
+ "epoch": 0.416,
526
+ "grad_norm": 0.8097939416205123,
527
+ "learning_rate": 4.847652533601164e-06,
528
+ "loss": 1.1395,
529
+ "num_tokens": 78425328.0,
530
+ "step": 650
531
+ },
532
+ {
533
+ "epoch": 0.4224,
534
+ "grad_norm": 0.8447649876579609,
535
+ "learning_rate": 4.8416603132922425e-06,
536
+ "loss": 1.1378,
537
+ "num_tokens": 79638521.0,
538
+ "step": 660
539
+ },
540
+ {
541
+ "epoch": 0.4288,
542
+ "grad_norm": 0.9421170322416722,
543
+ "learning_rate": 4.83555636010105e-06,
544
+ "loss": 1.1349,
545
+ "num_tokens": 80836868.0,
546
+ "step": 670
547
+ },
548
+ {
549
+ "epoch": 0.4352,
550
+ "grad_norm": 0.9009555407016511,
551
+ "learning_rate": 4.829340965279173e-06,
552
+ "loss": 1.1482,
553
+ "num_tokens": 82050746.0,
554
+ "step": 680
555
+ },
556
+ {
557
+ "epoch": 0.4416,
558
+ "grad_norm": 0.9304718962620818,
559
+ "learning_rate": 4.823014425395662e-06,
560
+ "loss": 1.1535,
561
+ "num_tokens": 83256247.0,
562
+ "step": 690
563
+ },
564
+ {
565
+ "epoch": 0.448,
566
+ "grad_norm": 0.8268029795401431,
567
+ "learning_rate": 4.816577042322883e-06,
568
+ "loss": 1.1625,
569
+ "num_tokens": 84466963.0,
570
+ "step": 700
571
+ },
572
+ {
573
+ "epoch": 0.4544,
574
+ "grad_norm": 0.8118838757785675,
575
+ "learning_rate": 4.810029123222109e-06,
576
+ "loss": 1.1582,
577
+ "num_tokens": 85668747.0,
578
+ "step": 710
579
+ },
580
+ {
581
+ "epoch": 0.4608,
582
+ "grad_norm": 0.8191391458452703,
583
+ "learning_rate": 4.803370980528868e-06,
584
+ "loss": 1.1508,
585
+ "num_tokens": 86869314.0,
586
+ "step": 720
587
+ },
588
+ {
589
+ "epoch": 0.4672,
590
+ "grad_norm": 0.8573356891805307,
591
+ "learning_rate": 4.796602931938031e-06,
592
+ "loss": 1.1367,
593
+ "num_tokens": 88072166.0,
594
+ "step": 730
595
+ },
596
+ {
597
+ "epoch": 0.4736,
598
+ "grad_norm": 0.9130087766709583,
599
+ "learning_rate": 4.789725300388658e-06,
600
+ "loss": 1.1496,
601
+ "num_tokens": 89276560.0,
602
+ "step": 740
603
+ },
604
+ {
605
+ "epoch": 0.48,
606
+ "grad_norm": 0.8756224792489176,
607
+ "learning_rate": 4.782738414048581e-06,
608
+ "loss": 1.1387,
609
+ "num_tokens": 90489167.0,
610
+ "step": 750
611
+ },
612
+ {
613
+ "epoch": 0.4864,
614
+ "grad_norm": 0.8660533049576743,
615
+ "learning_rate": 4.775642606298758e-06,
616
+ "loss": 1.1293,
617
+ "num_tokens": 91699027.0,
618
+ "step": 760
619
+ },
620
+ {
621
+ "epoch": 0.4928,
622
+ "grad_norm": 0.9344747635312723,
623
+ "learning_rate": 4.7684382157173515e-06,
624
+ "loss": 1.1544,
625
+ "num_tokens": 92907904.0,
626
+ "step": 770
627
+ },
628
+ {
629
+ "epoch": 0.4992,
630
+ "grad_norm": 0.8232769483557345,
631
+ "learning_rate": 4.761125586063583e-06,
632
+ "loss": 1.1509,
633
+ "num_tokens": 94108258.0,
634
+ "step": 780
635
+ },
636
+ {
637
+ "epoch": 0.5056,
638
+ "grad_norm": 0.8019044034927749,
639
+ "learning_rate": 4.753705066261326e-06,
640
+ "loss": 1.142,
641
+ "num_tokens": 95319591.0,
642
+ "step": 790
643
+ },
644
+ {
645
+ "epoch": 0.512,
646
+ "grad_norm": 0.8744491818182848,
647
+ "learning_rate": 4.74617701038246e-06,
648
+ "loss": 1.1407,
649
+ "num_tokens": 96527466.0,
650
+ "step": 800
651
+ },
652
+ {
653
+ "epoch": 0.5184,
654
+ "grad_norm": 0.8457377069978257,
655
+ "learning_rate": 4.738541777629971e-06,
656
+ "loss": 1.1454,
657
+ "num_tokens": 97741955.0,
658
+ "step": 810
659
+ },
660
+ {
661
+ "epoch": 0.5248,
662
+ "grad_norm": 0.8367461594303044,
663
+ "learning_rate": 4.730799732320819e-06,
664
+ "loss": 1.1499,
665
+ "num_tokens": 98947846.0,
666
+ "step": 820
667
+ },
668
+ {
669
+ "epoch": 0.5312,
670
+ "grad_norm": 0.8153933334854007,
671
+ "learning_rate": 4.722951243868547e-06,
672
+ "loss": 1.1338,
673
+ "num_tokens": 100149443.0,
674
+ "step": 830
675
+ },
676
+ {
677
+ "epoch": 0.5376,
678
+ "grad_norm": 0.9553883385280855,
679
+ "learning_rate": 4.7149966867656625e-06,
680
+ "loss": 1.1239,
681
+ "num_tokens": 101354489.0,
682
+ "step": 840
683
+ },
684
+ {
685
+ "epoch": 0.544,
686
+ "grad_norm": 0.8020256868069202,
687
+ "learning_rate": 4.706936440565759e-06,
688
+ "loss": 1.1233,
689
+ "num_tokens": 102561908.0,
690
+ "step": 850
691
+ },
692
+ {
693
+ "epoch": 0.5504,
694
+ "grad_norm": 0.8506848444686664,
695
+ "learning_rate": 4.698770889865414e-06,
696
+ "loss": 1.1314,
697
+ "num_tokens": 103765389.0,
698
+ "step": 860
699
+ },
700
+ {
701
+ "epoch": 0.5568,
702
+ "grad_norm": 0.8931807739845334,
703
+ "learning_rate": 4.690500424285833e-06,
704
+ "loss": 1.1367,
705
+ "num_tokens": 104973326.0,
706
+ "step": 870
707
+ },
708
+ {
709
+ "epoch": 0.5632,
710
+ "grad_norm": 0.8498884776316712,
711
+ "learning_rate": 4.682125438454261e-06,
712
+ "loss": 1.1329,
713
+ "num_tokens": 106184942.0,
714
+ "step": 880
715
+ },
716
+ {
717
+ "epoch": 0.5696,
718
+ "grad_norm": 0.8866656591752357,
719
+ "learning_rate": 4.673646331985151e-06,
720
+ "loss": 1.1469,
721
+ "num_tokens": 107391403.0,
722
+ "step": 890
723
+ },
724
+ {
725
+ "epoch": 0.576,
726
+ "grad_norm": 0.8247486140289442,
727
+ "learning_rate": 4.665063509461098e-06,
728
+ "loss": 1.1304,
729
+ "num_tokens": 108599244.0,
730
+ "step": 900
731
+ },
732
+ {
733
+ "epoch": 0.5824,
734
+ "grad_norm": 0.8509584195104843,
735
+ "learning_rate": 4.6563773804135305e-06,
736
+ "loss": 1.1205,
737
+ "num_tokens": 109802767.0,
738
+ "step": 910
739
+ },
740
+ {
741
+ "epoch": 0.5888,
742
+ "grad_norm": 0.9532478448654986,
743
+ "learning_rate": 4.647588359303178e-06,
744
+ "loss": 1.135,
745
+ "num_tokens": 111002144.0,
746
+ "step": 920
747
+ },
748
+ {
749
+ "epoch": 0.5952,
750
+ "grad_norm": 0.795143766492276,
751
+ "learning_rate": 4.638696865500284e-06,
752
+ "loss": 1.133,
753
+ "num_tokens": 112202360.0,
754
+ "step": 930
755
+ },
756
+ {
757
+ "epoch": 0.6016,
758
+ "grad_norm": 0.8884950967785606,
759
+ "learning_rate": 4.629703323264605e-06,
760
+ "loss": 1.1174,
761
+ "num_tokens": 113410661.0,
762
+ "step": 940
763
+ },
764
+ {
765
+ "epoch": 0.608,
766
+ "grad_norm": 0.8094095645216874,
767
+ "learning_rate": 4.62060816172516e-06,
768
+ "loss": 1.1359,
769
+ "num_tokens": 114615154.0,
770
+ "step": 950
771
+ },
772
+ {
773
+ "epoch": 0.6144,
774
+ "grad_norm": 0.8517004319099382,
775
+ "learning_rate": 4.611411814859758e-06,
776
+ "loss": 1.1141,
777
+ "num_tokens": 115826696.0,
778
+ "step": 960
779
+ },
780
+ {
781
+ "epoch": 0.6208,
782
+ "grad_norm": 0.8739388391386897,
783
+ "learning_rate": 4.602114721474293e-06,
784
+ "loss": 1.1204,
785
+ "num_tokens": 117030663.0,
786
+ "step": 970
787
+ },
788
+ {
789
+ "epoch": 0.6272,
790
+ "grad_norm": 1.0126603878935398,
791
+ "learning_rate": 4.592717325181798e-06,
792
+ "loss": 1.1259,
793
+ "num_tokens": 118243461.0,
794
+ "step": 980
795
+ },
796
+ {
797
+ "epoch": 0.6336,
798
+ "grad_norm": 0.7961249459761912,
799
+ "learning_rate": 4.583220074381288e-06,
800
+ "loss": 1.1105,
801
+ "num_tokens": 119444400.0,
802
+ "step": 990
803
+ },
804
+ {
805
+ "epoch": 0.64,
806
+ "grad_norm": 0.8547801323336933,
807
+ "learning_rate": 4.573623422236359e-06,
808
+ "loss": 1.1247,
809
+ "num_tokens": 120646721.0,
810
+ "step": 1000
811
+ },
812
+ {
813
+ "epoch": 0.6464,
814
+ "grad_norm": 0.8827343366608609,
815
+ "learning_rate": 4.563927826653562e-06,
816
+ "loss": 1.1381,
817
+ "num_tokens": 121856814.0,
818
+ "step": 1010
819
+ },
820
+ {
821
+ "epoch": 0.6528,
822
+ "grad_norm": 0.8379604515543791,
823
+ "learning_rate": 4.554133750260561e-06,
824
+ "loss": 1.1038,
825
+ "num_tokens": 123063137.0,
826
+ "step": 1020
827
+ },
828
+ {
829
+ "epoch": 0.6592,
830
+ "grad_norm": 0.9009991930297082,
831
+ "learning_rate": 4.544241660384057e-06,
832
+ "loss": 1.1351,
833
+ "num_tokens": 124281752.0,
834
+ "step": 1030
835
+ },
836
+ {
837
+ "epoch": 0.6656,
838
+ "grad_norm": 0.9398290903202526,
839
+ "learning_rate": 4.534252029027485e-06,
840
+ "loss": 1.132,
841
+ "num_tokens": 125483927.0,
842
+ "step": 1040
843
+ },
844
+ {
845
+ "epoch": 0.672,
846
+ "grad_norm": 0.8135458599046622,
847
+ "learning_rate": 4.5241653328484965e-06,
848
+ "loss": 1.1137,
849
+ "num_tokens": 126688041.0,
850
+ "step": 1050
851
+ },
852
+ {
853
+ "epoch": 0.6784,
854
+ "grad_norm": 0.826631698433715,
855
+ "learning_rate": 4.5139820531362125e-06,
856
+ "loss": 1.1149,
857
+ "num_tokens": 127895497.0,
858
+ "step": 1060
859
+ },
860
+ {
861
+ "epoch": 0.6848,
862
+ "grad_norm": 0.8326760862617015,
863
+ "learning_rate": 4.503702675788263e-06,
864
+ "loss": 1.1082,
865
+ "num_tokens": 129093768.0,
866
+ "step": 1070
867
+ },
868
+ {
869
+ "epoch": 0.6912,
870
+ "grad_norm": 0.8187909661973681,
871
+ "learning_rate": 4.493327691287596e-06,
872
+ "loss": 1.1213,
873
+ "num_tokens": 130296941.0,
874
+ "step": 1080
875
+ },
876
+ {
877
+ "epoch": 0.6976,
878
+ "grad_norm": 0.8758642744013126,
879
+ "learning_rate": 4.482857594679082e-06,
880
+ "loss": 1.1169,
881
+ "num_tokens": 131499785.0,
882
+ "step": 1090
883
+ },
884
+ {
885
+ "epoch": 0.704,
886
+ "grad_norm": 0.9756017880226009,
887
+ "learning_rate": 4.472292885545887e-06,
888
+ "loss": 1.1182,
889
+ "num_tokens": 132704447.0,
890
+ "step": 1100
891
+ },
892
+ {
893
+ "epoch": 0.7104,
894
+ "grad_norm": 0.9918470716003941,
895
+ "learning_rate": 4.4616340679856344e-06,
896
+ "loss": 1.112,
897
+ "num_tokens": 133914148.0,
898
+ "step": 1110
899
+ },
900
+ {
901
+ "epoch": 0.7168,
902
+ "grad_norm": 0.7736509572616426,
903
+ "learning_rate": 4.450881650586354e-06,
904
+ "loss": 1.0948,
905
+ "num_tokens": 135116690.0,
906
+ "step": 1120
907
+ },
908
+ {
909
+ "epoch": 0.7232,
910
+ "grad_norm": 0.8393996918370894,
911
+ "learning_rate": 4.440036146402218e-06,
912
+ "loss": 1.1196,
913
+ "num_tokens": 136325534.0,
914
+ "step": 1130
915
+ },
916
+ {
917
+ "epoch": 0.7296,
918
+ "grad_norm": 0.8283036410858456,
919
+ "learning_rate": 4.429098072929052e-06,
920
+ "loss": 1.1249,
921
+ "num_tokens": 137532058.0,
922
+ "step": 1140
923
+ },
924
+ {
925
+ "epoch": 0.736,
926
+ "grad_norm": 1.0272561438627168,
927
+ "learning_rate": 4.418067952079651e-06,
928
+ "loss": 1.0894,
929
+ "num_tokens": 138742925.0,
930
+ "step": 1150
931
+ },
932
+ {
933
+ "epoch": 0.7424,
934
+ "grad_norm": 0.9457224166686296,
935
+ "learning_rate": 4.40694631015887e-06,
936
+ "loss": 1.1072,
937
+ "num_tokens": 139944361.0,
938
+ "step": 1160
939
+ },
940
+ {
941
+ "epoch": 0.7488,
942
+ "grad_norm": 0.8472242869303449,
943
+ "learning_rate": 4.395733677838515e-06,
944
+ "loss": 1.104,
945
+ "num_tokens": 141145139.0,
946
+ "step": 1170
947
+ },
948
+ {
949
+ "epoch": 0.7552,
950
+ "grad_norm": 0.8369893067934512,
951
+ "learning_rate": 4.384430590132023e-06,
952
+ "loss": 1.1167,
953
+ "num_tokens": 142348857.0,
954
+ "step": 1180
955
+ },
956
+ {
957
+ "epoch": 0.7616,
958
+ "grad_norm": 0.9417838753194914,
959
+ "learning_rate": 4.373037586368925e-06,
960
+ "loss": 1.0952,
961
+ "num_tokens": 143560823.0,
962
+ "step": 1190
963
+ },
964
+ {
965
+ "epoch": 0.768,
966
+ "grad_norm": 0.83199280244184,
967
+ "learning_rate": 4.361555210169126e-06,
968
+ "loss": 1.0969,
969
+ "num_tokens": 144770576.0,
970
+ "step": 1200
971
+ },
972
+ {
973
+ "epoch": 0.7744,
974
+ "grad_norm": 0.8757783495810086,
975
+ "learning_rate": 4.349984009416952e-06,
976
+ "loss": 1.0948,
977
+ "num_tokens": 145978862.0,
978
+ "step": 1210
979
+ },
980
+ {
981
+ "epoch": 0.7808,
982
+ "grad_norm": 0.8374080168936522,
983
+ "learning_rate": 4.3383245362350174e-06,
984
+ "loss": 1.1087,
985
+ "num_tokens": 147191743.0,
986
+ "step": 1220
987
+ },
988
+ {
989
+ "epoch": 0.7872,
990
+ "grad_norm": 0.8702169752217432,
991
+ "learning_rate": 4.326577346957876e-06,
992
+ "loss": 1.1099,
993
+ "num_tokens": 148399289.0,
994
+ "step": 1230
995
+ },
996
+ {
997
+ "epoch": 0.7936,
998
+ "grad_norm": 0.8016984816166285,
999
+ "learning_rate": 4.314743002105473e-06,
1000
+ "loss": 1.1052,
1001
+ "num_tokens": 149602404.0,
1002
+ "step": 1240
1003
+ },
1004
+ {
1005
+ "epoch": 0.8,
1006
+ "grad_norm": 1.0811796381892176,
1007
+ "learning_rate": 4.302822066356408e-06,
1008
+ "loss": 1.0996,
1009
+ "num_tokens": 150811734.0,
1010
+ "step": 1250
1011
+ },
1012
+ {
1013
+ "epoch": 0.8064,
1014
+ "grad_norm": 0.8374755480022819,
1015
+ "learning_rate": 4.290815108520982e-06,
1016
+ "loss": 1.1185,
1017
+ "num_tokens": 152011294.0,
1018
+ "step": 1260
1019
+ },
1020
+ {
1021
+ "epoch": 0.8128,
1022
+ "grad_norm": 0.7904368039438139,
1023
+ "learning_rate": 4.278722701514061e-06,
1024
+ "loss": 1.0992,
1025
+ "num_tokens": 153217258.0,
1026
+ "step": 1270
1027
+ },
1028
+ {
1029
+ "epoch": 0.8192,
1030
+ "grad_norm": 0.785661611999425,
1031
+ "learning_rate": 4.266545422327741e-06,
1032
+ "loss": 1.1208,
1033
+ "num_tokens": 154419838.0,
1034
+ "step": 1280
1035
+ },
1036
+ {
1037
+ "epoch": 0.8256,
1038
+ "grad_norm": 0.8439322755320521,
1039
+ "learning_rate": 4.254283852003813e-06,
1040
+ "loss": 1.1091,
1041
+ "num_tokens": 155626578.0,
1042
+ "step": 1290
1043
+ },
1044
+ {
1045
+ "epoch": 0.832,
1046
+ "grad_norm": 0.8732275622995317,
1047
+ "learning_rate": 4.241938575606038e-06,
1048
+ "loss": 1.0826,
1049
+ "num_tokens": 156825805.0,
1050
+ "step": 1300
1051
+ },
1052
+ {
1053
+ "epoch": 0.8384,
1054
+ "grad_norm": 0.8014980196902037,
1055
+ "learning_rate": 4.229510182192235e-06,
1056
+ "loss": 1.1093,
1057
+ "num_tokens": 158037877.0,
1058
+ "step": 1310
1059
+ },
1060
+ {
1061
+ "epoch": 0.8448,
1062
+ "grad_norm": 0.8106302375207448,
1063
+ "learning_rate": 4.216999264786169e-06,
1064
+ "loss": 1.1073,
1065
+ "num_tokens": 159245106.0,
1066
+ "step": 1320
1067
+ },
1068
+ {
1069
+ "epoch": 0.8512,
1070
+ "grad_norm": 0.9385310776537238,
1071
+ "learning_rate": 4.204406420349259e-06,
1072
+ "loss": 1.1056,
1073
+ "num_tokens": 160456114.0,
1074
+ "step": 1330
1075
+ },
1076
+ {
1077
+ "epoch": 0.8576,
1078
+ "grad_norm": 0.9579249297784465,
1079
+ "learning_rate": 4.191732249752092e-06,
1080
+ "loss": 1.1021,
1081
+ "num_tokens": 161659510.0,
1082
+ "step": 1340
1083
+ },
1084
+ {
1085
+ "epoch": 0.864,
1086
+ "grad_norm": 0.8134490186326385,
1087
+ "learning_rate": 4.178977357745749e-06,
1088
+ "loss": 1.0821,
1089
+ "num_tokens": 162865495.0,
1090
+ "step": 1350
1091
+ },
1092
+ {
1093
+ "epoch": 0.8704,
1094
+ "grad_norm": 0.7943299269230713,
1095
+ "learning_rate": 4.166142352932957e-06,
1096
+ "loss": 1.1065,
1097
+ "num_tokens": 164069925.0,
1098
+ "step": 1360
1099
+ },
1100
+ {
1101
+ "epoch": 0.8768,
1102
+ "grad_norm": 0.8171116530483417,
1103
+ "learning_rate": 4.153227847739041e-06,
1104
+ "loss": 1.0873,
1105
+ "num_tokens": 165272777.0,
1106
+ "step": 1370
1107
+ },
1108
+ {
1109
+ "epoch": 0.8832,
1110
+ "grad_norm": 0.8472827858602203,
1111
+ "learning_rate": 4.140234458382708e-06,
1112
+ "loss": 1.1207,
1113
+ "num_tokens": 166473564.0,
1114
+ "step": 1380
1115
+ },
1116
+ {
1117
+ "epoch": 0.8896,
1118
+ "grad_norm": 0.8254355045966608,
1119
+ "learning_rate": 4.12716280484664e-06,
1120
+ "loss": 1.093,
1121
+ "num_tokens": 167678209.0,
1122
+ "step": 1390
1123
+ },
1124
+ {
1125
+ "epoch": 0.896,
1126
+ "grad_norm": 0.8238773032302608,
1127
+ "learning_rate": 4.114013510847914e-06,
1128
+ "loss": 1.1004,
1129
+ "num_tokens": 168879199.0,
1130
+ "step": 1400
1131
+ },
1132
+ {
1133
+ "epoch": 0.9024,
1134
+ "grad_norm": 0.8035266067408213,
1135
+ "learning_rate": 4.100787203808241e-06,
1136
+ "loss": 1.09,
1137
+ "num_tokens": 170089062.0,
1138
+ "step": 1410
1139
+ },
1140
+ {
1141
+ "epoch": 0.9088,
1142
+ "grad_norm": 0.796684651593008,
1143
+ "learning_rate": 4.0874845148240265e-06,
1144
+ "loss": 1.0923,
1145
+ "num_tokens": 171298354.0,
1146
+ "step": 1420
1147
+ },
1148
+ {
1149
+ "epoch": 0.9152,
1150
+ "grad_norm": 0.7944378162845194,
1151
+ "learning_rate": 4.074106078636259e-06,
1152
+ "loss": 1.0877,
1153
+ "num_tokens": 172502932.0,
1154
+ "step": 1430
1155
+ },
1156
+ {
1157
+ "epoch": 0.9216,
1158
+ "grad_norm": 0.8222630499336689,
1159
+ "learning_rate": 4.0606525336002215e-06,
1160
+ "loss": 1.1069,
1161
+ "num_tokens": 173714359.0,
1162
+ "step": 1440
1163
+ },
1164
+ {
1165
+ "epoch": 0.928,
1166
+ "grad_norm": 0.8284462145945989,
1167
+ "learning_rate": 4.047124521655037e-06,
1168
+ "loss": 1.1063,
1169
+ "num_tokens": 174915024.0,
1170
+ "step": 1450
1171
+ },
1172
+ {
1173
+ "epoch": 0.9344,
1174
+ "grad_norm": 1.1184143246349953,
1175
+ "learning_rate": 4.033522688293033e-06,
1176
+ "loss": 1.0958,
1177
+ "num_tokens": 176121314.0,
1178
+ "step": 1460
1179
+ },
1180
+ {
1181
+ "epoch": 0.9408,
1182
+ "grad_norm": 0.9302956644371011,
1183
+ "learning_rate": 4.019847682528943e-06,
1184
+ "loss": 1.1057,
1185
+ "num_tokens": 177329003.0,
1186
+ "step": 1470
1187
+ },
1188
+ {
1189
+ "epoch": 0.9472,
1190
+ "grad_norm": 0.8315189293207337,
1191
+ "learning_rate": 4.00610015686894e-06,
1192
+ "loss": 1.1021,
1193
+ "num_tokens": 178533383.0,
1194
+ "step": 1480
1195
+ },
1196
+ {
1197
+ "epoch": 0.9536,
1198
+ "grad_norm": 0.780029339050911,
1199
+ "learning_rate": 3.9922807672795015e-06,
1200
+ "loss": 1.1022,
1201
+ "num_tokens": 179737544.0,
1202
+ "step": 1490
1203
+ },
1204
+ {
1205
+ "epoch": 0.96,
1206
+ "grad_norm": 0.8861787669753409,
1207
+ "learning_rate": 3.97839017315611e-06,
1208
+ "loss": 1.1033,
1209
+ "num_tokens": 180941884.0,
1210
+ "step": 1500
1211
+ },
1212
+ {
1213
+ "epoch": 0.9664,
1214
+ "grad_norm": 0.8613329501244571,
1215
+ "learning_rate": 3.964429037291785e-06,
1216
+ "loss": 1.0932,
1217
+ "num_tokens": 182147995.0,
1218
+ "step": 1510
1219
+ },
1220
+ {
1221
+ "epoch": 0.9728,
1222
+ "grad_norm": 0.7767446273299125,
1223
+ "learning_rate": 3.950398025845469e-06,
1224
+ "loss": 1.0764,
1225
+ "num_tokens": 183351238.0,
1226
+ "step": 1520
1227
+ },
1228
+ {
1229
+ "epoch": 0.9792,
1230
+ "grad_norm": 0.7800388177467502,
1231
+ "learning_rate": 3.936297808310229e-06,
1232
+ "loss": 1.0955,
1233
+ "num_tokens": 184559744.0,
1234
+ "step": 1530
1235
+ },
1236
+ {
1237
+ "epoch": 0.9856,
1238
+ "grad_norm": 0.822587499260109,
1239
+ "learning_rate": 3.9221290574813205e-06,
1240
+ "loss": 1.101,
1241
+ "num_tokens": 185771261.0,
1242
+ "step": 1540
1243
+ },
1244
+ {
1245
+ "epoch": 0.992,
1246
+ "grad_norm": 0.7842833667912362,
1247
+ "learning_rate": 3.907892449424081e-06,
1248
+ "loss": 1.0858,
1249
+ "num_tokens": 186988878.0,
1250
+ "step": 1550
1251
+ },
1252
+ {
1253
+ "epoch": 0.9984,
1254
+ "grad_norm": 0.875565650877801,
1255
+ "learning_rate": 3.893588663441669e-06,
1256
+ "loss": 1.1096,
1257
+ "num_tokens": 188198614.0,
1258
+ "step": 1560
1259
+ },
1260
+ {
1261
+ "epoch": 1.00448,
1262
+ "grad_norm": 0.9833099796256903,
1263
+ "learning_rate": 3.8792183820426575e-06,
1264
+ "loss": 1.0518,
1265
+ "num_tokens": 189338860.0,
1266
+ "step": 1570
1267
+ },
1268
+ {
1269
+ "epoch": 1.01088,
1270
+ "grad_norm": 0.9539211061323496,
1271
+ "learning_rate": 3.864782290908462e-06,
1272
+ "loss": 1.0558,
1273
+ "num_tokens": 190541615.0,
1274
+ "step": 1580
1275
+ },
1276
+ {
1277
+ "epoch": 1.01728,
1278
+ "grad_norm": 0.8277557093113368,
1279
+ "learning_rate": 3.850281078860627e-06,
1280
+ "loss": 1.0672,
1281
+ "num_tokens": 191744590.0,
1282
+ "step": 1590
1283
+ },
1284
+ {
1285
+ "epoch": 1.02368,
1286
+ "grad_norm": 0.8095245034674352,
1287
+ "learning_rate": 3.835715437827954e-06,
1288
+ "loss": 1.0555,
1289
+ "num_tokens": 192946831.0,
1290
+ "step": 1600
1291
+ },
1292
+ {
1293
+ "epoch": 1.03008,
1294
+ "grad_norm": 0.8670205092911757,
1295
+ "learning_rate": 3.821086062813492e-06,
1296
+ "loss": 1.0558,
1297
+ "num_tokens": 194153241.0,
1298
+ "step": 1610
1299
+ },
1300
+ {
1301
+ "epoch": 1.03648,
1302
+ "grad_norm": 0.8041612181651476,
1303
+ "learning_rate": 3.806393651861372e-06,
1304
+ "loss": 1.0713,
1305
+ "num_tokens": 195361386.0,
1306
+ "step": 1620
1307
+ },
1308
+ {
1309
+ "epoch": 1.04288,
1310
+ "grad_norm": 0.8201672913405339,
1311
+ "learning_rate": 3.7916389060234964e-06,
1312
+ "loss": 1.0612,
1313
+ "num_tokens": 196570539.0,
1314
+ "step": 1630
1315
+ },
1316
+ {
1317
+ "epoch": 1.04928,
1318
+ "grad_norm": 0.822814114472732,
1319
+ "learning_rate": 3.776822529326097e-06,
1320
+ "loss": 1.0643,
1321
+ "num_tokens": 197758018.0,
1322
+ "step": 1640
1323
+ },
1324
+ {
1325
+ "epoch": 1.05568,
1326
+ "grad_norm": 0.8405563342503541,
1327
+ "learning_rate": 3.7619452287361306e-06,
1328
+ "loss": 1.0576,
1329
+ "num_tokens": 198962473.0,
1330
+ "step": 1650
1331
+ },
1332
+ {
1333
+ "epoch": 1.06208,
1334
+ "grad_norm": 0.8733811946067399,
1335
+ "learning_rate": 3.7470077141275578e-06,
1336
+ "loss": 1.0602,
1337
+ "num_tokens": 200168404.0,
1338
+ "step": 1660
1339
+ },
1340
+ {
1341
+ "epoch": 1.06848,
1342
+ "grad_norm": 0.7810891863766373,
1343
+ "learning_rate": 3.732010698247463e-06,
1344
+ "loss": 1.0429,
1345
+ "num_tokens": 201383921.0,
1346
+ "step": 1670
1347
+ },
1348
+ {
1349
+ "epoch": 1.07488,
1350
+ "grad_norm": 0.8253121322208729,
1351
+ "learning_rate": 3.7169548966820466e-06,
1352
+ "loss": 1.069,
1353
+ "num_tokens": 202590191.0,
1354
+ "step": 1680
1355
+ },
1356
+ {
1357
+ "epoch": 1.08128,
1358
+ "grad_norm": 0.7968885719952052,
1359
+ "learning_rate": 3.7018410278224852e-06,
1360
+ "loss": 1.0661,
1361
+ "num_tokens": 203790064.0,
1362
+ "step": 1690
1363
+ },
1364
+ {
1365
+ "epoch": 1.08768,
1366
+ "grad_norm": 0.7513522866065546,
1367
+ "learning_rate": 3.686669812830648e-06,
1368
+ "loss": 1.0648,
1369
+ "num_tokens": 205004834.0,
1370
+ "step": 1700
1371
+ },
1372
+ {
1373
+ "epoch": 1.09408,
1374
+ "grad_norm": 0.8133897709614188,
1375
+ "learning_rate": 3.671441975604689e-06,
1376
+ "loss": 1.0574,
1377
+ "num_tokens": 206218130.0,
1378
+ "step": 1710
1379
+ },
1380
+ {
1381
+ "epoch": 1.10048,
1382
+ "grad_norm": 0.855169356505383,
1383
+ "learning_rate": 3.6561582427445053e-06,
1384
+ "loss": 1.0652,
1385
+ "num_tokens": 207421774.0,
1386
+ "step": 1720
1387
+ },
1388
+ {
1389
+ "epoch": 1.10688,
1390
+ "grad_norm": 0.7861479775879827,
1391
+ "learning_rate": 3.6408193435170695e-06,
1392
+ "loss": 1.0601,
1393
+ "num_tokens": 208639076.0,
1394
+ "step": 1730
1395
+ },
1396
+ {
1397
+ "epoch": 1.11328,
1398
+ "grad_norm": 0.7759167355223116,
1399
+ "learning_rate": 3.625426009821628e-06,
1400
+ "loss": 1.0515,
1401
+ "num_tokens": 209843506.0,
1402
+ "step": 1740
1403
+ },
1404
+ {
1405
+ "epoch": 1.11968,
1406
+ "grad_norm": 0.7737945956455258,
1407
+ "learning_rate": 3.609978976154784e-06,
1408
+ "loss": 1.0449,
1409
+ "num_tokens": 211053262.0,
1410
+ "step": 1750
1411
+ },
1412
+ {
1413
+ "epoch": 1.12608,
1414
+ "grad_norm": 0.8033895393207562,
1415
+ "learning_rate": 3.594478979575443e-06,
1416
+ "loss": 1.0653,
1417
+ "num_tokens": 212256390.0,
1418
+ "step": 1760
1419
+ },
1420
+ {
1421
+ "epoch": 1.13248,
1422
+ "grad_norm": 0.8687778972426285,
1423
+ "learning_rate": 3.578926759669653e-06,
1424
+ "loss": 1.046,
1425
+ "num_tokens": 213458553.0,
1426
+ "step": 1770
1427
+ },
1428
+ {
1429
+ "epoch": 1.13888,
1430
+ "grad_norm": 0.8146069292073773,
1431
+ "learning_rate": 3.5633230585153093e-06,
1432
+ "loss": 1.0587,
1433
+ "num_tokens": 214667929.0,
1434
+ "step": 1780
1435
+ },
1436
+ {
1437
+ "epoch": 1.14528,
1438
+ "grad_norm": 0.8442869654702855,
1439
+ "learning_rate": 3.5476686206467465e-06,
1440
+ "loss": 1.0476,
1441
+ "num_tokens": 215872854.0,
1442
+ "step": 1790
1443
+ },
1444
+ {
1445
+ "epoch": 1.15168,
1446
+ "grad_norm": 0.8166732673631207,
1447
+ "learning_rate": 3.531964193019214e-06,
1448
+ "loss": 1.0486,
1449
+ "num_tokens": 217084577.0,
1450
+ "step": 1800
1451
+ },
1452
+ {
1453
+ "epoch": 1.15808,
1454
+ "grad_norm": 0.8407184177973456,
1455
+ "learning_rate": 3.5162105249732336e-06,
1456
+ "loss": 1.0446,
1457
+ "num_tokens": 218284006.0,
1458
+ "step": 1810
1459
+ },
1460
+ {
1461
+ "epoch": 1.16448,
1462
+ "grad_norm": 0.7814422822824459,
1463
+ "learning_rate": 3.5004083681988476e-06,
1464
+ "loss": 1.0466,
1465
+ "num_tokens": 219487469.0,
1466
+ "step": 1820
1467
+ },
1468
+ {
1469
+ "epoch": 1.17088,
1470
+ "grad_norm": 0.7953904441180448,
1471
+ "learning_rate": 3.484558476699748e-06,
1472
+ "loss": 1.0539,
1473
+ "num_tokens": 220690881.0,
1474
+ "step": 1830
1475
+ },
1476
+ {
1477
+ "epoch": 1.17728,
1478
+ "grad_norm": 0.8120616693504964,
1479
+ "learning_rate": 3.468661606757301e-06,
1480
+ "loss": 1.0564,
1481
+ "num_tokens": 221898060.0,
1482
+ "step": 1840
1483
+ },
1484
+ {
1485
+ "epoch": 1.18368,
1486
+ "grad_norm": 0.7894301070451438,
1487
+ "learning_rate": 3.45271851689446e-06,
1488
+ "loss": 1.0576,
1489
+ "num_tokens": 223099219.0,
1490
+ "step": 1850
1491
+ },
1492
+ {
1493
+ "epoch": 1.19008,
1494
+ "grad_norm": 0.8628648936847306,
1495
+ "learning_rate": 3.436729967839575e-06,
1496
+ "loss": 1.0697,
1497
+ "num_tokens": 224314472.0,
1498
+ "step": 1860
1499
+ },
1500
+ {
1501
+ "epoch": 1.19648,
1502
+ "grad_norm": 0.8485241964897267,
1503
+ "learning_rate": 3.4206967224900885e-06,
1504
+ "loss": 1.0583,
1505
+ "num_tokens": 225513940.0,
1506
+ "step": 1870
1507
+ },
1508
+ {
1509
+ "epoch": 1.20288,
1510
+ "grad_norm": 0.8019635872502272,
1511
+ "learning_rate": 3.40461954587614e-06,
1512
+ "loss": 1.0484,
1513
+ "num_tokens": 226733560.0,
1514
+ "step": 1880
1515
+ },
1516
+ {
1517
+ "epoch": 1.20928,
1518
+ "grad_norm": 0.8148504625626072,
1519
+ "learning_rate": 3.3884992051240613e-06,
1520
+ "loss": 1.049,
1521
+ "num_tokens": 227946861.0,
1522
+ "step": 1890
1523
+ },
1524
+ {
1525
+ "epoch": 1.21568,
1526
+ "grad_norm": 0.799348761407277,
1527
+ "learning_rate": 3.372336469419767e-06,
1528
+ "loss": 1.0636,
1529
+ "num_tokens": 229149854.0,
1530
+ "step": 1900
1531
+ },
1532
+ {
1533
+ "epoch": 1.22208,
1534
+ "grad_norm": 0.8121058069211242,
1535
+ "learning_rate": 3.35613210997206e-06,
1536
+ "loss": 1.0679,
1537
+ "num_tokens": 230358777.0,
1538
+ "step": 1910
1539
+ },
1540
+ {
1541
+ "epoch": 1.22848,
1542
+ "grad_norm": 0.8225529513521229,
1543
+ "learning_rate": 3.339886899975831e-06,
1544
+ "loss": 1.0455,
1545
+ "num_tokens": 231573319.0,
1546
+ "step": 1920
1547
+ },
1548
+ {
1549
+ "epoch": 1.23488,
1550
+ "grad_norm": 0.7930056234558618,
1551
+ "learning_rate": 3.3236016145751616e-06,
1552
+ "loss": 1.0453,
1553
+ "num_tokens": 232778798.0,
1554
+ "step": 1930
1555
+ },
1556
+ {
1557
+ "epoch": 1.24128,
1558
+ "grad_norm": 0.7824523425714454,
1559
+ "learning_rate": 3.307277030826342e-06,
1560
+ "loss": 1.046,
1561
+ "num_tokens": 233985281.0,
1562
+ "step": 1940
1563
+ },
1564
+ {
1565
+ "epoch": 1.24768,
1566
+ "grad_norm": 1.126385656615945,
1567
+ "learning_rate": 3.290913927660793e-06,
1568
+ "loss": 1.0418,
1569
+ "num_tokens": 235194572.0,
1570
+ "step": 1950
1571
+ },
1572
+ {
1573
+ "epoch": 1.25408,
1574
+ "grad_norm": 0.8230976427574604,
1575
+ "learning_rate": 3.274513085847899e-06,
1576
+ "loss": 1.0596,
1577
+ "num_tokens": 236400915.0,
1578
+ "step": 1960
1579
+ },
1580
+ {
1581
+ "epoch": 1.26048,
1582
+ "grad_norm": 0.7715465448814725,
1583
+ "learning_rate": 3.2580752879577508e-06,
1584
+ "loss": 1.0421,
1585
+ "num_tokens": 237602768.0,
1586
+ "step": 1970
1587
+ },
1588
+ {
1589
+ "epoch": 1.26688,
1590
+ "grad_norm": 0.7604905419126253,
1591
+ "learning_rate": 3.2416013183238105e-06,
1592
+ "loss": 1.0596,
1593
+ "num_tokens": 238810127.0,
1594
+ "step": 1980
1595
+ },
1596
+ {
1597
+ "epoch": 1.27328,
1598
+ "grad_norm": 0.8091857959210363,
1599
+ "learning_rate": 3.22509196300548e-06,
1600
+ "loss": 1.0544,
1601
+ "num_tokens": 240016518.0,
1602
+ "step": 1990
1603
+ },
1604
+ {
1605
+ "epoch": 1.27968,
1606
+ "grad_norm": 0.8428609624878182,
1607
+ "learning_rate": 3.2085480097506015e-06,
1608
+ "loss": 1.0517,
1609
+ "num_tokens": 241224903.0,
1610
+ "step": 2000
1611
+ },
1612
+ {
1613
+ "epoch": 1.2860800000000001,
1614
+ "grad_norm": 0.8167440202916451,
1615
+ "learning_rate": 3.191970247957862e-06,
1616
+ "loss": 1.0607,
1617
+ "num_tokens": 242432829.0,
1618
+ "step": 2010
1619
+ },
1620
+ {
1621
+ "epoch": 1.29248,
1622
+ "grad_norm": 0.843189559655867,
1623
+ "learning_rate": 3.1753594686391343e-06,
1624
+ "loss": 1.0519,
1625
+ "num_tokens": 243643680.0,
1626
+ "step": 2020
1627
+ },
1628
+ {
1629
+ "epoch": 1.29888,
1630
+ "grad_norm": 0.8113193681644453,
1631
+ "learning_rate": 3.158716464381728e-06,
1632
+ "loss": 1.0534,
1633
+ "num_tokens": 244850967.0,
1634
+ "step": 2030
1635
+ },
1636
+ {
1637
+ "epoch": 1.30528,
1638
+ "grad_norm": 0.8238038397216464,
1639
+ "learning_rate": 3.1420420293105753e-06,
1640
+ "loss": 1.0537,
1641
+ "num_tokens": 246055107.0,
1642
+ "step": 2040
1643
+ },
1644
+ {
1645
+ "epoch": 1.31168,
1646
+ "grad_norm": 0.7585161106894139,
1647
+ "learning_rate": 3.1253369590503357e-06,
1648
+ "loss": 1.053,
1649
+ "num_tokens": 247255291.0,
1650
+ "step": 2050
1651
+ },
1652
+ {
1653
+ "epoch": 1.31808,
1654
+ "grad_norm": 0.8358837254742888,
1655
+ "learning_rate": 3.1086020506874352e-06,
1656
+ "loss": 1.0552,
1657
+ "num_tokens": 248472347.0,
1658
+ "step": 2060
1659
+ },
1660
+ {
1661
+ "epoch": 1.3244799999999999,
1662
+ "grad_norm": 0.8248705338889306,
1663
+ "learning_rate": 3.091838102732031e-06,
1664
+ "loss": 1.0547,
1665
+ "num_tokens": 249675791.0,
1666
+ "step": 2070
1667
+ },
1668
+ {
1669
+ "epoch": 1.33088,
1670
+ "grad_norm": 0.8413169777388428,
1671
+ "learning_rate": 3.0750459150799116e-06,
1672
+ "loss": 1.0512,
1673
+ "num_tokens": 250883742.0,
1674
+ "step": 2080
1675
+ },
1676
+ {
1677
+ "epoch": 1.33728,
1678
+ "grad_norm": 0.7773274742980588,
1679
+ "learning_rate": 3.0582262889743304e-06,
1680
+ "loss": 1.0435,
1681
+ "num_tokens": 252092991.0,
1682
+ "step": 2090
1683
+ },
1684
+ {
1685
+ "epoch": 1.34368,
1686
+ "grad_norm": 0.8160134758509259,
1687
+ "learning_rate": 3.0413800269677707e-06,
1688
+ "loss": 1.0617,
1689
+ "num_tokens": 253296187.0,
1690
+ "step": 2100
1691
+ },
1692
+ {
1693
+ "epoch": 1.35008,
1694
+ "grad_norm": 0.8253629381678,
1695
+ "learning_rate": 3.024507932883659e-06,
1696
+ "loss": 1.0467,
1697
+ "num_tokens": 254497531.0,
1698
+ "step": 2110
1699
+ },
1700
+ {
1701
+ "epoch": 1.35648,
1702
+ "grad_norm": 0.8449321081656331,
1703
+ "learning_rate": 3.0076108117779995e-06,
1704
+ "loss": 1.0501,
1705
+ "num_tokens": 255698828.0,
1706
+ "step": 2120
1707
+ },
1708
+ {
1709
+ "epoch": 1.36288,
1710
+ "grad_norm": 0.864074317535777,
1711
+ "learning_rate": 2.9906894699009714e-06,
1712
+ "loss": 1.051,
1713
+ "num_tokens": 256901786.0,
1714
+ "step": 2130
1715
+ },
1716
+ {
1717
+ "epoch": 1.36928,
1718
+ "grad_norm": 0.8545075997582061,
1719
+ "learning_rate": 2.973744714658452e-06,
1720
+ "loss": 1.045,
1721
+ "num_tokens": 258102803.0,
1722
+ "step": 2140
1723
+ },
1724
+ {
1725
+ "epoch": 1.37568,
1726
+ "grad_norm": 0.7950948333995521,
1727
+ "learning_rate": 2.9567773545734917e-06,
1728
+ "loss": 1.0609,
1729
+ "num_tokens": 259309237.0,
1730
+ "step": 2150
1731
+ },
1732
+ {
1733
+ "epoch": 1.38208,
1734
+ "grad_norm": 0.7772992222068908,
1735
+ "learning_rate": 2.9397881992477388e-06,
1736
+ "loss": 1.0529,
1737
+ "num_tokens": 260512534.0,
1738
+ "step": 2160
1739
+ },
1740
+ {
1741
+ "epoch": 1.38848,
1742
+ "grad_norm": 0.8230701809627932,
1743
+ "learning_rate": 2.9227780593228063e-06,
1744
+ "loss": 1.0492,
1745
+ "num_tokens": 261721309.0,
1746
+ "step": 2170
1747
+ },
1748
+ {
1749
+ "epoch": 1.3948800000000001,
1750
+ "grad_norm": 0.803410117521878,
1751
+ "learning_rate": 2.90574774644159e-06,
1752
+ "loss": 1.0341,
1753
+ "num_tokens": 262926754.0,
1754
+ "step": 2180
1755
+ },
1756
+ {
1757
+ "epoch": 1.40128,
1758
+ "grad_norm": 0.9047895349858696,
1759
+ "learning_rate": 2.8886980732095467e-06,
1760
+ "loss": 1.0304,
1761
+ "num_tokens": 264129158.0,
1762
+ "step": 2190
1763
+ },
1764
+ {
1765
+ "epoch": 1.40768,
1766
+ "grad_norm": 0.8048555076981502,
1767
+ "learning_rate": 2.8716298531559133e-06,
1768
+ "loss": 1.0494,
1769
+ "num_tokens": 265332827.0,
1770
+ "step": 2200
1771
+ },
1772
+ {
1773
+ "epoch": 1.41408,
1774
+ "grad_norm": 0.8364957546359483,
1775
+ "learning_rate": 2.8545439006948948e-06,
1776
+ "loss": 1.0423,
1777
+ "num_tokens": 266542306.0,
1778
+ "step": 2210
1779
+ },
1780
+ {
1781
+ "epoch": 1.42048,
1782
+ "grad_norm": 0.7904212151138658,
1783
+ "learning_rate": 2.8374410310868044e-06,
1784
+ "loss": 1.0423,
1785
+ "num_tokens": 267751752.0,
1786
+ "step": 2220
1787
+ },
1788
+ {
1789
+ "epoch": 1.42688,
1790
+ "grad_norm": 0.8434192039931359,
1791
+ "learning_rate": 2.820322060399156e-06,
1792
+ "loss": 1.0471,
1793
+ "num_tokens": 268955655.0,
1794
+ "step": 2230
1795
+ },
1796
+ {
1797
+ "epoch": 1.4332799999999999,
1798
+ "grad_norm": 0.7746642379992007,
1799
+ "learning_rate": 2.803187805467733e-06,
1800
+ "loss": 1.0574,
1801
+ "num_tokens": 270165303.0,
1802
+ "step": 2240
1803
+ },
1804
+ {
1805
+ "epoch": 1.43968,
1806
+ "grad_norm": 0.8462146853078769,
1807
+ "learning_rate": 2.7860390838576125e-06,
1808
+ "loss": 1.0579,
1809
+ "num_tokens": 271371057.0,
1810
+ "step": 2250
1811
+ },
1812
+ {
1813
+ "epoch": 1.44608,
1814
+ "grad_norm": 0.7814911330812998,
1815
+ "learning_rate": 2.7688767138241474e-06,
1816
+ "loss": 1.0374,
1817
+ "num_tokens": 272570562.0,
1818
+ "step": 2260
1819
+ },
1820
+ {
1821
+ "epoch": 1.45248,
1822
+ "grad_norm": 0.7648342437809393,
1823
+ "learning_rate": 2.7517015142739335e-06,
1824
+ "loss": 1.0551,
1825
+ "num_tokens": 273773102.0,
1826
+ "step": 2270
1827
+ },
1828
+ {
1829
+ "epoch": 1.45888,
1830
+ "grad_norm": 0.8135139786141086,
1831
+ "learning_rate": 2.734514304725727e-06,
1832
+ "loss": 1.0431,
1833
+ "num_tokens": 274979458.0,
1834
+ "step": 2280
1835
+ },
1836
+ {
1837
+ "epoch": 1.46528,
1838
+ "grad_norm": 0.8275244446318913,
1839
+ "learning_rate": 2.717315905271344e-06,
1840
+ "loss": 1.0436,
1841
+ "num_tokens": 276180959.0,
1842
+ "step": 2290
1843
+ },
1844
+ {
1845
+ "epoch": 1.47168,
1846
+ "grad_norm": 0.8456585906125247,
1847
+ "learning_rate": 2.700107136536533e-06,
1848
+ "loss": 1.0571,
1849
+ "num_tokens": 277381104.0,
1850
+ "step": 2300
1851
+ },
1852
+ {
1853
+ "epoch": 1.47808,
1854
+ "grad_norm": 0.7676272425904394,
1855
+ "learning_rate": 2.682888819641809e-06,
1856
+ "loss": 1.0454,
1857
+ "num_tokens": 278589355.0,
1858
+ "step": 2310
1859
+ },
1860
+ {
1861
+ "epoch": 1.48448,
1862
+ "grad_norm": 0.7530507207913718,
1863
+ "learning_rate": 2.6656617761632863e-06,
1864
+ "loss": 1.0452,
1865
+ "num_tokens": 279802576.0,
1866
+ "step": 2320
1867
+ },
1868
+ {
1869
+ "epoch": 1.49088,
1870
+ "grad_norm": 0.8099596670334043,
1871
+ "learning_rate": 2.6484268280934674e-06,
1872
+ "loss": 1.0441,
1873
+ "num_tokens": 281010541.0,
1874
+ "step": 2330
1875
+ },
1876
+ {
1877
+ "epoch": 1.49728,
1878
+ "grad_norm": 0.8098629796138991,
1879
+ "learning_rate": 2.631184797802022e-06,
1880
+ "loss": 1.0379,
1881
+ "num_tokens": 282219974.0,
1882
+ "step": 2340
1883
+ },
1884
+ {
1885
+ "epoch": 1.5036800000000001,
1886
+ "grad_norm": 0.8633758780871927,
1887
+ "learning_rate": 2.613936507996554e-06,
1888
+ "loss": 1.0553,
1889
+ "num_tokens": 283423505.0,
1890
+ "step": 2350
1891
+ },
1892
+ {
1893
+ "epoch": 1.5100799999999999,
1894
+ "grad_norm": 0.8494557884878244,
1895
+ "learning_rate": 2.5966827816833393e-06,
1896
+ "loss": 1.034,
1897
+ "num_tokens": 284628594.0,
1898
+ "step": 2360
1899
+ },
1900
+ {
1901
+ "epoch": 1.51648,
1902
+ "grad_norm": 0.8961874351947472,
1903
+ "learning_rate": 2.579424442128057e-06,
1904
+ "loss": 1.0403,
1905
+ "num_tokens": 285839496.0,
1906
+ "step": 2370
1907
+ },
1908
+ {
1909
+ "epoch": 1.52288,
1910
+ "grad_norm": 0.8982519210357097,
1911
+ "learning_rate": 2.562162312816511e-06,
1912
+ "loss": 1.0516,
1913
+ "num_tokens": 287048432.0,
1914
+ "step": 2380
1915
+ },
1916
+ {
1917
+ "epoch": 1.52928,
1918
+ "grad_norm": 0.834174589328149,
1919
+ "learning_rate": 2.544897217415332e-06,
1920
+ "loss": 1.0371,
1921
+ "num_tokens": 288256611.0,
1922
+ "step": 2390
1923
+ },
1924
+ {
1925
+ "epoch": 1.5356800000000002,
1926
+ "grad_norm": 0.7790317392375281,
1927
+ "learning_rate": 2.5276299797326777e-06,
1928
+ "loss": 1.0347,
1929
+ "num_tokens": 289465699.0,
1930
+ "step": 2400
1931
+ },
1932
+ {
1933
+ "epoch": 1.54208,
1934
+ "grad_norm": 0.8113176021935586,
1935
+ "learning_rate": 2.510361423678929e-06,
1936
+ "loss": 1.035,
1937
+ "num_tokens": 290666618.0,
1938
+ "step": 2410
1939
+ },
1940
+ {
1941
+ "epoch": 1.54848,
1942
+ "grad_norm": 0.8175298566784388,
1943
+ "learning_rate": 2.4930923732273683e-06,
1944
+ "loss": 1.0364,
1945
+ "num_tokens": 291864705.0,
1946
+ "step": 2420
1947
+ },
1948
+ {
1949
+ "epoch": 1.55488,
1950
+ "grad_norm": 0.8601137215701125,
1951
+ "learning_rate": 2.4758236523748734e-06,
1952
+ "loss": 1.041,
1953
+ "num_tokens": 293077992.0,
1954
+ "step": 2430
1955
+ },
1956
+ {
1957
+ "epoch": 1.56128,
1958
+ "grad_norm": 0.766342647676912,
1959
+ "learning_rate": 2.4585560851025917e-06,
1960
+ "loss": 1.0448,
1961
+ "num_tokens": 294292270.0,
1962
+ "step": 2440
1963
+ },
1964
+ {
1965
+ "epoch": 1.56768,
1966
+ "grad_norm": 0.8144040865702195,
1967
+ "learning_rate": 2.4412904953366263e-06,
1968
+ "loss": 1.0626,
1969
+ "num_tokens": 295501196.0,
1970
+ "step": 2450
1971
+ },
1972
+ {
1973
+ "epoch": 1.57408,
1974
+ "grad_norm": 0.8426321262317878,
1975
+ "learning_rate": 2.424027706908728e-06,
1976
+ "loss": 1.0361,
1977
+ "num_tokens": 296713375.0,
1978
+ "step": 2460
1979
+ },
1980
+ {
1981
+ "epoch": 1.58048,
1982
+ "grad_norm": 0.870533748148585,
1983
+ "learning_rate": 2.406768543516977e-06,
1984
+ "loss": 1.041,
1985
+ "num_tokens": 297925333.0,
1986
+ "step": 2470
1987
+ },
1988
+ {
1989
+ "epoch": 1.5868799999999998,
1990
+ "grad_norm": 0.813316442312155,
1991
+ "learning_rate": 2.389513828686485e-06,
1992
+ "loss": 1.0337,
1993
+ "num_tokens": 299126955.0,
1994
+ "step": 2480
1995
+ },
1996
+ {
1997
+ "epoch": 1.59328,
1998
+ "grad_norm": 0.8050560504469045,
1999
+ "learning_rate": 2.372264385730099e-06,
2000
+ "loss": 1.0432,
2001
+ "num_tokens": 300336458.0,
2002
+ "step": 2490
2003
+ },
2004
+ {
2005
+ "epoch": 1.59968,
2006
+ "grad_norm": 0.8007073397832749,
2007
+ "learning_rate": 2.355021037709118e-06,
2008
+ "loss": 1.0571,
2009
+ "num_tokens": 301539282.0,
2010
+ "step": 2500
2011
+ },
2012
+ {
2013
+ "epoch": 1.60608,
2014
+ "grad_norm": 0.8259619776886131,
2015
+ "learning_rate": 2.3377846073940207e-06,
2016
+ "loss": 1.0478,
2017
+ "num_tokens": 302743922.0,
2018
+ "step": 2510
2019
+ },
2020
+ {
2021
+ "epoch": 1.6124800000000001,
2022
+ "grad_norm": 0.7857263898091816,
2023
+ "learning_rate": 2.3205559172252052e-06,
2024
+ "loss": 1.0265,
2025
+ "num_tokens": 303945412.0,
2026
+ "step": 2520
2027
+ },
2028
+ {
2029
+ "epoch": 1.6188799999999999,
2030
+ "grad_norm": 0.7830231024473471,
2031
+ "learning_rate": 2.303335789273744e-06,
2032
+ "loss": 1.0424,
2033
+ "num_tokens": 305146555.0,
2034
+ "step": 2530
2035
+ },
2036
+ {
2037
+ "epoch": 1.62528,
2038
+ "grad_norm": 0.773313259484951,
2039
+ "learning_rate": 2.286125045202164e-06,
2040
+ "loss": 1.0435,
2041
+ "num_tokens": 306362219.0,
2042
+ "step": 2540
2043
+ },
2044
+ {
2045
+ "epoch": 1.63168,
2046
+ "grad_norm": 0.8201327055565161,
2047
+ "learning_rate": 2.2689245062252398e-06,
2048
+ "loss": 1.0509,
2049
+ "num_tokens": 307565244.0,
2050
+ "step": 2550
2051
+ },
2052
+ {
2053
+ "epoch": 1.63808,
2054
+ "grad_norm": 0.827602816998628,
2055
+ "learning_rate": 2.2517349930708032e-06,
2056
+ "loss": 1.049,
2057
+ "num_tokens": 308770918.0,
2058
+ "step": 2560
2059
+ },
2060
+ {
2061
+ "epoch": 1.6444800000000002,
2062
+ "grad_norm": 0.7919141547822656,
2063
+ "learning_rate": 2.234557325940589e-06,
2064
+ "loss": 1.0431,
2065
+ "num_tokens": 309984868.0,
2066
+ "step": 2570
2067
+ },
2068
+ {
2069
+ "epoch": 1.65088,
2070
+ "grad_norm": 0.7394357208064606,
2071
+ "learning_rate": 2.2173923244710954e-06,
2072
+ "loss": 1.0312,
2073
+ "num_tokens": 311187334.0,
2074
+ "step": 2580
2075
+ },
2076
+ {
2077
+ "epoch": 1.65728,
2078
+ "grad_norm": 0.785327584034165,
2079
+ "learning_rate": 2.200240807694474e-06,
2080
+ "loss": 1.0353,
2081
+ "num_tokens": 312396234.0,
2082
+ "step": 2590
2083
+ },
2084
+ {
2085
+ "epoch": 1.66368,
2086
+ "grad_norm": 0.8232141872243898,
2087
+ "learning_rate": 2.1831035939994554e-06,
2088
+ "loss": 1.0562,
2089
+ "num_tokens": 313601855.0,
2090
+ "step": 2600
2091
+ },
2092
+ {
2093
+ "epoch": 1.67008,
2094
+ "grad_norm": 0.7833896049344754,
2095
+ "learning_rate": 2.165981501092291e-06,
2096
+ "loss": 1.0407,
2097
+ "num_tokens": 314804262.0,
2098
+ "step": 2610
2099
+ },
2100
+ {
2101
+ "epoch": 1.67648,
2102
+ "grad_norm": 0.7885429615611813,
2103
+ "learning_rate": 2.148875345957741e-06,
2104
+ "loss": 1.0295,
2105
+ "num_tokens": 316005948.0,
2106
+ "step": 2620
2107
+ },
2108
+ {
2109
+ "epoch": 1.68288,
2110
+ "grad_norm": 0.7829739281596803,
2111
+ "learning_rate": 2.131785944820092e-06,
2112
+ "loss": 1.0252,
2113
+ "num_tokens": 317208803.0,
2114
+ "step": 2630
2115
+ },
2116
+ {
2117
+ "epoch": 1.6892800000000001,
2118
+ "grad_norm": 0.7928770034373539,
2119
+ "learning_rate": 2.114714113104211e-06,
2120
+ "loss": 1.0498,
2121
+ "num_tokens": 318416652.0,
2122
+ "step": 2640
2123
+ },
2124
+ {
2125
+ "epoch": 1.6956799999999999,
2126
+ "grad_norm": 0.790850427449215,
2127
+ "learning_rate": 2.097660665396632e-06,
2128
+ "loss": 1.0421,
2129
+ "num_tokens": 319628095.0,
2130
+ "step": 2650
2131
+ },
2132
+ {
2133
+ "epoch": 1.70208,
2134
+ "grad_norm": 0.8023551277637352,
2135
+ "learning_rate": 2.0806264154066946e-06,
2136
+ "loss": 1.0393,
2137
+ "num_tokens": 320828695.0,
2138
+ "step": 2660
2139
+ },
2140
+ {
2141
+ "epoch": 1.70848,
2142
+ "grad_norm": 0.7922577515769408,
2143
+ "learning_rate": 2.0636121759277135e-06,
2144
+ "loss": 1.0485,
2145
+ "num_tokens": 322041475.0,
2146
+ "step": 2670
2147
+ },
2148
+ {
2149
+ "epoch": 1.71488,
2150
+ "grad_norm": 0.7971244397123712,
2151
+ "learning_rate": 2.046618758798197e-06,
2152
+ "loss": 1.0275,
2153
+ "num_tokens": 323243099.0,
2154
+ "step": 2680
2155
+ },
2156
+ {
2157
+ "epoch": 1.7212800000000001,
2158
+ "grad_norm": 0.8040701855401029,
2159
+ "learning_rate": 2.0296469748631113e-06,
2160
+ "loss": 1.0238,
2161
+ "num_tokens": 324448570.0,
2162
+ "step": 2690
2163
+ },
2164
+ {
2165
+ "epoch": 1.7276799999999999,
2166
+ "grad_norm": 0.7586132016898348,
2167
+ "learning_rate": 2.0126976339351883e-06,
2168
+ "loss": 1.0345,
2169
+ "num_tokens": 325656124.0,
2170
+ "step": 2700
2171
+ },
2172
+ {
2173
+ "epoch": 1.73408,
2174
+ "grad_norm": 0.7741130670086324,
2175
+ "learning_rate": 1.995771544756287e-06,
2176
+ "loss": 1.0304,
2177
+ "num_tokens": 326867457.0,
2178
+ "step": 2710
2179
+ },
2180
+ {
2181
+ "epoch": 1.74048,
2182
+ "grad_norm": 0.7603630468965715,
2183
+ "learning_rate": 1.9788695149588027e-06,
2184
+ "loss": 1.0348,
2185
+ "num_tokens": 328069419.0,
2186
+ "step": 2720
2187
+ },
2188
+ {
2189
+ "epoch": 1.74688,
2190
+ "grad_norm": 0.7656701861871694,
2191
+ "learning_rate": 1.9619923510271333e-06,
2192
+ "loss": 1.0337,
2193
+ "num_tokens": 329274913.0,
2194
+ "step": 2730
2195
+ },
2196
+ {
2197
+ "epoch": 1.75328,
2198
+ "grad_norm": 0.7795354061202655,
2199
+ "learning_rate": 1.945140858259195e-06,
2200
+ "loss": 1.0467,
2201
+ "num_tokens": 330497463.0,
2202
+ "step": 2740
2203
+ },
2204
+ {
2205
+ "epoch": 1.75968,
2206
+ "grad_norm": 0.8511581572833524,
2207
+ "learning_rate": 1.928315840727998e-06,
2208
+ "loss": 1.0292,
2209
+ "num_tokens": 331705026.0,
2210
+ "step": 2750
2211
+ },
2212
+ {
2213
+ "epoch": 1.76608,
2214
+ "grad_norm": 0.8185264208105538,
2215
+ "learning_rate": 1.9115181012432795e-06,
2216
+ "loss": 1.0462,
2217
+ "num_tokens": 332910224.0,
2218
+ "step": 2760
2219
+ },
2220
+ {
2221
+ "epoch": 1.77248,
2222
+ "grad_norm": 0.8581339452377109,
2223
+ "learning_rate": 1.8947484413131996e-06,
2224
+ "loss": 1.0344,
2225
+ "num_tokens": 334124736.0,
2226
+ "step": 2770
2227
+ },
2228
+ {
2229
+ "epoch": 1.77888,
2230
+ "grad_norm": 0.8469198844835426,
2231
+ "learning_rate": 1.8780076611060962e-06,
2232
+ "loss": 1.031,
2233
+ "num_tokens": 335328630.0,
2234
+ "step": 2780
2235
+ },
2236
+ {
2237
+ "epoch": 1.78528,
2238
+ "grad_norm": 0.8097233001009885,
2239
+ "learning_rate": 1.861296559412303e-06,
2240
+ "loss": 1.0268,
2241
+ "num_tokens": 336532418.0,
2242
+ "step": 2790
2243
+ },
2244
+ {
2245
+ "epoch": 1.79168,
2246
+ "grad_norm": 0.8477425454150115,
2247
+ "learning_rate": 1.844615933606037e-06,
2248
+ "loss": 1.0311,
2249
+ "num_tokens": 337730246.0,
2250
+ "step": 2800
2251
+ },
2252
+ {
2253
+ "epoch": 1.7980800000000001,
2254
+ "grad_norm": 0.7749925952377877,
2255
+ "learning_rate": 1.8279665796073498e-06,
2256
+ "loss": 1.0415,
2257
+ "num_tokens": 338937460.0,
2258
+ "step": 2810
2259
+ },
2260
+ {
2261
+ "epoch": 1.8044799999999999,
2262
+ "grad_norm": 0.7976261215266267,
2263
+ "learning_rate": 1.8113492918441523e-06,
2264
+ "loss": 1.047,
2265
+ "num_tokens": 340147641.0,
2266
+ "step": 2820
2267
+ },
2268
+ {
2269
+ "epoch": 1.81088,
2270
+ "grad_norm": 0.7733887224457893,
2271
+ "learning_rate": 1.7947648632143075e-06,
2272
+ "loss": 1.0309,
2273
+ "num_tokens": 341352040.0,
2274
+ "step": 2830
2275
+ },
2276
+ {
2277
+ "epoch": 1.81728,
2278
+ "grad_norm": 0.7739175808490624,
2279
+ "learning_rate": 1.7782140850477967e-06,
2280
+ "loss": 1.0518,
2281
+ "num_tokens": 342559891.0,
2282
+ "step": 2840
2283
+ },
2284
+ {
2285
+ "epoch": 1.82368,
2286
+ "grad_norm": 0.797265127895327,
2287
+ "learning_rate": 1.7616977470689605e-06,
2288
+ "loss": 1.0325,
2289
+ "num_tokens": 343774370.0,
2290
+ "step": 2850
2291
+ },
2292
+ {
2293
+ "epoch": 1.8300800000000002,
2294
+ "grad_norm": 0.8443750617770532,
2295
+ "learning_rate": 1.7452166373588185e-06,
2296
+ "loss": 1.021,
2297
+ "num_tokens": 344970302.0,
2298
+ "step": 2860
2299
+ },
2300
+ {
2301
+ "epoch": 1.83648,
2302
+ "grad_norm": 0.8003604596330827,
2303
+ "learning_rate": 1.7287715423174662e-06,
2304
+ "loss": 1.0304,
2305
+ "num_tokens": 346180457.0,
2306
+ "step": 2870
2307
+ },
2308
+ {
2309
+ "epoch": 1.84288,
2310
+ "grad_norm": 0.8376385879621375,
2311
+ "learning_rate": 1.7123632466265483e-06,
2312
+ "loss": 1.0395,
2313
+ "num_tokens": 347385193.0,
2314
+ "step": 2880
2315
+ },
2316
+ {
2317
+ "epoch": 1.84928,
2318
+ "grad_norm": 0.7906644473344662,
2319
+ "learning_rate": 1.69599253321182e-06,
2320
+ "loss": 1.0413,
2321
+ "num_tokens": 348601710.0,
2322
+ "step": 2890
2323
+ },
2324
+ {
2325
+ "epoch": 1.85568,
2326
+ "grad_norm": 0.7924809016265382,
2327
+ "learning_rate": 1.6796601832057905e-06,
2328
+ "loss": 1.0378,
2329
+ "num_tokens": 349806167.0,
2330
+ "step": 2900
2331
+ },
2332
+ {
2333
+ "epoch": 1.86208,
2334
+ "grad_norm": 0.7766495775123572,
2335
+ "learning_rate": 1.6633669759104488e-06,
2336
+ "loss": 1.0264,
2337
+ "num_tokens": 351012043.0,
2338
+ "step": 2910
2339
+ },
2340
+ {
2341
+ "epoch": 1.86848,
2342
+ "grad_norm": 1.3435506252779292,
2343
+ "learning_rate": 1.6471136887600805e-06,
2344
+ "loss": 1.0237,
2345
+ "num_tokens": 352217587.0,
2346
+ "step": 2920
2347
+ },
2348
+ {
2349
+ "epoch": 1.87488,
2350
+ "grad_norm": 0.765607343549468,
2351
+ "learning_rate": 1.6309010972841728e-06,
2352
+ "loss": 1.0382,
2353
+ "num_tokens": 353418821.0,
2354
+ "step": 2930
2355
+ },
2356
+ {
2357
+ "epoch": 1.8812799999999998,
2358
+ "grad_norm": 0.8171820174646456,
2359
+ "learning_rate": 1.614729975070407e-06,
2360
+ "loss": 1.0366,
2361
+ "num_tokens": 354624890.0,
2362
+ "step": 2940
2363
+ },
2364
+ {
2365
+ "epoch": 1.88768,
2366
+ "grad_norm": 0.8064241532835642,
2367
+ "learning_rate": 1.598601093727749e-06,
2368
+ "loss": 1.0361,
2369
+ "num_tokens": 355824991.0,
2370
+ "step": 2950
2371
+ },
2372
+ {
2373
+ "epoch": 1.89408,
2374
+ "grad_norm": 0.7884619306846271,
2375
+ "learning_rate": 1.5825152228496342e-06,
2376
+ "loss": 1.0425,
2377
+ "num_tokens": 357030616.0,
2378
+ "step": 2960
2379
+ },
2380
+ {
2381
+ "epoch": 1.90048,
2382
+ "grad_norm": 0.8265648248850005,
2383
+ "learning_rate": 1.5664731299772401e-06,
2384
+ "loss": 1.0332,
2385
+ "num_tokens": 358234522.0,
2386
+ "step": 2970
2387
+ },
2388
+ {
2389
+ "epoch": 1.9068800000000001,
2390
+ "grad_norm": 0.8092024559268799,
2391
+ "learning_rate": 1.5504755805628677e-06,
2392
+ "loss": 1.0399,
2393
+ "num_tokens": 359443389.0,
2394
+ "step": 2980
2395
+ },
2396
+ {
2397
+ "epoch": 1.9132799999999999,
2398
+ "grad_norm": 0.791864238644019,
2399
+ "learning_rate": 1.5345233379334156e-06,
2400
+ "loss": 1.0289,
2401
+ "num_tokens": 360644258.0,
2402
+ "step": 2990
2403
+ },
2404
+ {
2405
+ "epoch": 1.91968,
2406
+ "grad_norm": 0.8006538523086424,
2407
+ "learning_rate": 1.5186171632539587e-06,
2408
+ "loss": 1.0392,
2409
+ "num_tokens": 361848281.0,
2410
+ "step": 3000
2411
+ },
2412
+ {
2413
+ "epoch": 1.92608,
2414
+ "grad_norm": 0.7852026214667117,
2415
+ "learning_rate": 1.502757815491429e-06,
2416
+ "loss": 1.0301,
2417
+ "num_tokens": 363051672.0,
2418
+ "step": 3010
2419
+ },
2420
+ {
2421
+ "epoch": 1.93248,
2422
+ "grad_norm": 0.7473075275246417,
2423
+ "learning_rate": 1.4869460513784011e-06,
2424
+ "loss": 1.0349,
2425
+ "num_tokens": 364249917.0,
2426
+ "step": 3020
2427
+ },
2428
+ {
2429
+ "epoch": 1.9388800000000002,
2430
+ "grad_norm": 0.7822299185363633,
2431
+ "learning_rate": 1.4711826253769828e-06,
2432
+ "loss": 1.04,
2433
+ "num_tokens": 365456248.0,
2434
+ "step": 3030
2435
+ },
2436
+ {
2437
+ "epoch": 1.94528,
2438
+ "grad_norm": 0.8034434681463449,
2439
+ "learning_rate": 1.4554682896428179e-06,
2440
+ "loss": 1.0379,
2441
+ "num_tokens": 366654881.0,
2442
+ "step": 3040
2443
+ },
2444
+ {
2445
+ "epoch": 1.95168,
2446
+ "grad_norm": 0.7768199970864885,
2447
+ "learning_rate": 1.439803793989198e-06,
2448
+ "loss": 1.0241,
2449
+ "num_tokens": 367861348.0,
2450
+ "step": 3050
2451
+ },
2452
+ {
2453
+ "epoch": 1.95808,
2454
+ "grad_norm": 0.8118112910224361,
2455
+ "learning_rate": 1.4241898858512824e-06,
2456
+ "loss": 1.0426,
2457
+ "num_tokens": 369064003.0,
2458
+ "step": 3060
2459
+ },
2460
+ {
2461
+ "epoch": 1.96448,
2462
+ "grad_norm": 0.7744113528953481,
2463
+ "learning_rate": 1.408627310250434e-06,
2464
+ "loss": 1.0414,
2465
+ "num_tokens": 370279324.0,
2466
+ "step": 3070
2467
+ },
2468
+ {
2469
+ "epoch": 1.97088,
2470
+ "grad_norm": 0.7887556630257991,
2471
+ "learning_rate": 1.3931168097586717e-06,
2472
+ "loss": 1.0336,
2473
+ "num_tokens": 371480368.0,
2474
+ "step": 3080
2475
+ },
2476
+ {
2477
+ "epoch": 1.97728,
2478
+ "grad_norm": 0.7640435636356337,
2479
+ "learning_rate": 1.377659124463239e-06,
2480
+ "loss": 1.042,
2481
+ "num_tokens": 372690129.0,
2482
+ "step": 3090
2483
+ },
2484
+ {
2485
+ "epoch": 1.98368,
2486
+ "grad_norm": 0.7603826553278634,
2487
+ "learning_rate": 1.3622549919312902e-06,
2488
+ "loss": 1.0361,
2489
+ "num_tokens": 373902924.0,
2490
+ "step": 3100
2491
+ },
2492
+ {
2493
+ "epoch": 1.9900799999999998,
2494
+ "grad_norm": 0.7599088525071184,
2495
+ "learning_rate": 1.346905147174694e-06,
2496
+ "loss": 1.0193,
2497
+ "num_tokens": 375112585.0,
2498
+ "step": 3110
2499
+ },
2500
+ {
2501
+ "epoch": 1.99648,
2502
+ "grad_norm": 0.7816099568186937,
2503
+ "learning_rate": 1.3316103226149682e-06,
2504
+ "loss": 1.0349,
2505
+ "num_tokens": 376325844.0,
2506
+ "step": 3120
2507
+ },
2508
+ {
2509
+ "epoch": 2.00256,
2510
+ "grad_norm": 0.7532423548597259,
2511
+ "learning_rate": 1.3163712480483255e-06,
2512
+ "loss": 1.0248,
2513
+ "num_tokens": 377473897.0,
2514
+ "step": 3130
2515
+ },
2516
+ {
2517
+ "epoch": 2.00896,
2518
+ "grad_norm": 0.7586660186977321,
2519
+ "learning_rate": 1.3011886506108578e-06,
2520
+ "loss": 1.0107,
2521
+ "num_tokens": 378675832.0,
2522
+ "step": 3140
2523
+ },
2524
+ {
2525
+ "epoch": 2.01536,
2526
+ "grad_norm": 0.7958518507428463,
2527
+ "learning_rate": 1.2860632547438334e-06,
2528
+ "loss": 1.0029,
2529
+ "num_tokens": 379872472.0,
2530
+ "step": 3150
2531
+ },
2532
+ {
2533
+ "epoch": 2.02176,
2534
+ "grad_norm": 0.8017956552207596,
2535
+ "learning_rate": 1.2709957821591384e-06,
2536
+ "loss": 1.0188,
2537
+ "num_tokens": 381071848.0,
2538
+ "step": 3160
2539
+ },
2540
+ {
2541
+ "epoch": 2.02816,
2542
+ "grad_norm": 0.8260326835110341,
2543
+ "learning_rate": 1.2559869518048307e-06,
2544
+ "loss": 1.0134,
2545
+ "num_tokens": 382272368.0,
2546
+ "step": 3170
2547
+ },
2548
+ {
2549
+ "epoch": 2.03456,
2550
+ "grad_norm": 0.845928507883109,
2551
+ "learning_rate": 1.2410374798308442e-06,
2552
+ "loss": 1.0107,
2553
+ "num_tokens": 383480338.0,
2554
+ "step": 3180
2555
+ },
2556
+ {
2557
+ "epoch": 2.04096,
2558
+ "grad_norm": 0.8513825857009242,
2559
+ "learning_rate": 1.2261480795548123e-06,
2560
+ "loss": 1.0099,
2561
+ "num_tokens": 384683907.0,
2562
+ "step": 3190
2563
+ },
2564
+ {
2565
+ "epoch": 2.04736,
2566
+ "grad_norm": 0.7711891823020852,
2567
+ "learning_rate": 1.211319461428032e-06,
2568
+ "loss": 1.0139,
2569
+ "num_tokens": 385889491.0,
2570
+ "step": 3200
2571
+ },
2572
+ {
2573
+ "epoch": 2.05376,
2574
+ "grad_norm": 0.7769167344105451,
2575
+ "learning_rate": 1.1965523330015652e-06,
2576
+ "loss": 1.0092,
2577
+ "num_tokens": 387095853.0,
2578
+ "step": 3210
2579
+ },
2580
+ {
2581
+ "epoch": 2.06016,
2582
+ "grad_norm": 0.7922783527359497,
2583
+ "learning_rate": 1.1818473988924797e-06,
2584
+ "loss": 1.0199,
2585
+ "num_tokens": 388306034.0,
2586
+ "step": 3220
2587
+ },
2588
+ {
2589
+ "epoch": 2.06656,
2590
+ "grad_norm": 0.8009332691587518,
2591
+ "learning_rate": 1.167205360750227e-06,
2592
+ "loss": 1.0185,
2593
+ "num_tokens": 389516647.0,
2594
+ "step": 3230
2595
+ },
2596
+ {
2597
+ "epoch": 2.07296,
2598
+ "grad_norm": 0.7591186989087252,
2599
+ "learning_rate": 1.1526269172231594e-06,
2600
+ "loss": 0.995,
2601
+ "num_tokens": 390724121.0,
2602
+ "step": 3240
2603
+ },
2604
+ {
2605
+ "epoch": 2.07936,
2606
+ "grad_norm": 0.8055729406106343,
2607
+ "learning_rate": 1.1381127639252005e-06,
2608
+ "loss": 1.0109,
2609
+ "num_tokens": 391924857.0,
2610
+ "step": 3250
2611
+ },
2612
+ {
2613
+ "epoch": 2.08576,
2614
+ "grad_norm": 0.7920326568899239,
2615
+ "learning_rate": 1.1236635934026474e-06,
2616
+ "loss": 0.9928,
2617
+ "num_tokens": 393133226.0,
2618
+ "step": 3260
2619
+ },
2620
+ {
2621
+ "epoch": 2.09216,
2622
+ "grad_norm": 0.8095321364071963,
2623
+ "learning_rate": 1.1092800951011283e-06,
2624
+ "loss": 1.0066,
2625
+ "num_tokens": 394338791.0,
2626
+ "step": 3270
2627
+ },
2628
+ {
2629
+ "epoch": 2.09856,
2630
+ "grad_norm": 0.7790939177959936,
2631
+ "learning_rate": 1.0949629553327106e-06,
2632
+ "loss": 1.0144,
2633
+ "num_tokens": 395544646.0,
2634
+ "step": 3280
2635
+ },
2636
+ {
2637
+ "epoch": 2.10496,
2638
+ "grad_norm": 0.9934552993460479,
2639
+ "learning_rate": 1.080712857243143e-06,
2640
+ "loss": 1.0004,
2641
+ "num_tokens": 396744920.0,
2642
+ "step": 3290
2643
+ },
2644
+ {
2645
+ "epoch": 2.11136,
2646
+ "grad_norm": 0.7949729572040324,
2647
+ "learning_rate": 1.0665304807792653e-06,
2648
+ "loss": 1.009,
2649
+ "num_tokens": 397964288.0,
2650
+ "step": 3300
2651
+ },
2652
+ {
2653
+ "epoch": 2.11776,
2654
+ "grad_norm": 0.7652291996158113,
2655
+ "learning_rate": 1.0524165026565655e-06,
2656
+ "loss": 1.007,
2657
+ "num_tokens": 399168969.0,
2658
+ "step": 3310
2659
+ },
2660
+ {
2661
+ "epoch": 2.12416,
2662
+ "grad_norm": 0.7761816653258836,
2663
+ "learning_rate": 1.0383715963268884e-06,
2664
+ "loss": 0.994,
2665
+ "num_tokens": 400373422.0,
2666
+ "step": 3320
2667
+ },
2668
+ {
2669
+ "epoch": 2.13056,
2670
+ "grad_norm": 0.8018173213180155,
2671
+ "learning_rate": 1.0243964319462997e-06,
2672
+ "loss": 1.0134,
2673
+ "num_tokens": 401577043.0,
2674
+ "step": 3330
2675
+ },
2676
+ {
2677
+ "epoch": 2.13696,
2678
+ "grad_norm": 0.7607821844421783,
2679
+ "learning_rate": 1.0104916763431133e-06,
2680
+ "loss": 1.0187,
2681
+ "num_tokens": 402777527.0,
2682
+ "step": 3340
2683
+ },
2684
+ {
2685
+ "epoch": 2.14336,
2686
+ "grad_norm": 0.8327193810047873,
2687
+ "learning_rate": 9.966579929860704e-07,
2688
+ "loss": 1.0249,
2689
+ "num_tokens": 403989663.0,
2690
+ "step": 3350
2691
+ },
2692
+ {
2693
+ "epoch": 2.14976,
2694
+ "grad_norm": 0.7706122440471653,
2695
+ "learning_rate": 9.828960419526818e-07,
2696
+ "loss": 1.0085,
2697
+ "num_tokens": 405198202.0,
2698
+ "step": 3360
2699
+ },
2700
+ {
2701
+ "epoch": 2.15616,
2702
+ "grad_norm": 0.7867703490032154,
2703
+ "learning_rate": 9.69206479897736e-07,
2704
+ "loss": 1.0197,
2705
+ "num_tokens": 406403598.0,
2706
+ "step": 3370
2707
+ },
2708
+ {
2709
+ "epoch": 2.16256,
2710
+ "grad_norm": 0.8249023880860281,
2711
+ "learning_rate": 9.555899600219634e-07,
2712
+ "loss": 1.0274,
2713
+ "num_tokens": 407600213.0,
2714
+ "step": 3380
2715
+ },
2716
+ {
2717
+ "epoch": 2.16896,
2718
+ "grad_norm": 0.7855210183667297,
2719
+ "learning_rate": 9.420471320408669e-07,
2720
+ "loss": 1.0127,
2721
+ "num_tokens": 408811259.0,
2722
+ "step": 3390
2723
+ },
2724
+ {
2725
+ "epoch": 2.17536,
2726
+ "grad_norm": 0.8119007655119803,
2727
+ "learning_rate": 9.28578642153726e-07,
2728
+ "loss": 1.0021,
2729
+ "num_tokens": 410014132.0,
2730
+ "step": 3400
2731
+ },
2732
+ {
2733
+ "epoch": 2.18176,
2734
+ "grad_norm": 0.9205406972397864,
2735
+ "learning_rate": 9.151851330127593e-07,
2736
+ "loss": 1.0126,
2737
+ "num_tokens": 411220727.0,
2738
+ "step": 3410
2739
+ },
2740
+ {
2741
+ "epoch": 2.18816,
2742
+ "grad_norm": 0.8056010049273263,
2743
+ "learning_rate": 9.018672436924605e-07,
2744
+ "loss": 0.9892,
2745
+ "num_tokens": 412425755.0,
2746
+ "step": 3420
2747
+ },
2748
+ {
2749
+ "epoch": 2.19456,
2750
+ "grad_norm": 0.7632161903493846,
2751
+ "learning_rate": 8.886256096591048e-07,
2752
+ "loss": 1.019,
2753
+ "num_tokens": 413631347.0,
2754
+ "step": 3430
2755
+ },
2756
+ {
2757
+ "epoch": 2.20096,
2758
+ "grad_norm": 0.7669268826101938,
2759
+ "learning_rate": 8.754608627404307e-07,
2760
+ "loss": 1.0048,
2761
+ "num_tokens": 414833259.0,
2762
+ "step": 3440
2763
+ },
2764
+ {
2765
+ "epoch": 2.20736,
2766
+ "grad_norm": 0.8361832533605145,
2767
+ "learning_rate": 8.623736310954869e-07,
2768
+ "loss": 1.0221,
2769
+ "num_tokens": 416040472.0,
2770
+ "step": 3450
2771
+ },
2772
+ {
2773
+ "epoch": 2.21376,
2774
+ "grad_norm": 0.7814096537064951,
2775
+ "learning_rate": 8.493645391846642e-07,
2776
+ "loss": 1.0037,
2777
+ "num_tokens": 417245756.0,
2778
+ "step": 3460
2779
+ },
2780
+ {
2781
+ "epoch": 2.22016,
2782
+ "grad_norm": 0.7933425120808404,
2783
+ "learning_rate": 8.364342077398971e-07,
2784
+ "loss": 0.9987,
2785
+ "num_tokens": 418455436.0,
2786
+ "step": 3470
2787
+ },
2788
+ {
2789
+ "epoch": 2.22656,
2790
+ "grad_norm": 0.7659925077465827,
2791
+ "learning_rate": 8.235832537350441e-07,
2792
+ "loss": 0.993,
2793
+ "num_tokens": 419667134.0,
2794
+ "step": 3480
2795
+ },
2796
+ {
2797
+ "epoch": 2.23296,
2798
+ "grad_norm": 0.8187051274632632,
2799
+ "learning_rate": 8.108122903564502e-07,
2800
+ "loss": 1.0028,
2801
+ "num_tokens": 420870725.0,
2802
+ "step": 3490
2803
+ },
2804
+ {
2805
+ "epoch": 2.23936,
2806
+ "grad_norm": 0.7595169446678035,
2807
+ "learning_rate": 7.98121926973692e-07,
2808
+ "loss": 1.0124,
2809
+ "num_tokens": 422076634.0,
2810
+ "step": 3500
2811
+ },
2812
+ {
2813
+ "epoch": 2.24576,
2814
+ "grad_norm": 0.8064753048978947,
2815
+ "learning_rate": 7.855127691104944e-07,
2816
+ "loss": 1.024,
2817
+ "num_tokens": 423284867.0,
2818
+ "step": 3510
2819
+ },
2820
+ {
2821
+ "epoch": 2.25216,
2822
+ "grad_norm": 0.809858814713402,
2823
+ "learning_rate": 7.729854184158411e-07,
2824
+ "loss": 1.0174,
2825
+ "num_tokens": 424493379.0,
2826
+ "step": 3520
2827
+ },
2828
+ {
2829
+ "epoch": 2.25856,
2830
+ "grad_norm": 0.7957945935555317,
2831
+ "learning_rate": 7.605404726352708e-07,
2832
+ "loss": 1.0149,
2833
+ "num_tokens": 425697729.0,
2834
+ "step": 3530
2835
+ },
2836
+ {
2837
+ "epoch": 2.26496,
2838
+ "grad_norm": 0.8194656210162423,
2839
+ "learning_rate": 7.481785255823482e-07,
2840
+ "loss": 0.9972,
2841
+ "num_tokens": 426893908.0,
2842
+ "step": 3540
2843
+ },
2844
+ {
2845
+ "epoch": 2.27136,
2846
+ "grad_norm": 0.7967423955163617,
2847
+ "learning_rate": 7.359001671103361e-07,
2848
+ "loss": 1.0106,
2849
+ "num_tokens": 428092842.0,
2850
+ "step": 3550
2851
+ },
2852
+ {
2853
+ "epoch": 2.27776,
2854
+ "grad_norm": 0.7881164663338793,
2855
+ "learning_rate": 7.237059830840482e-07,
2856
+ "loss": 1.0066,
2857
+ "num_tokens": 429286773.0,
2858
+ "step": 3560
2859
+ },
2860
+ {
2861
+ "epoch": 2.28416,
2862
+ "grad_norm": 0.7903923247778172,
2863
+ "learning_rate": 7.11596555351893e-07,
2864
+ "loss": 1.0111,
2865
+ "num_tokens": 430493341.0,
2866
+ "step": 3570
2867
+ },
2868
+ {
2869
+ "epoch": 2.29056,
2870
+ "grad_norm": 0.770776011448775,
2871
+ "learning_rate": 6.995724617181124e-07,
2872
+ "loss": 0.9923,
2873
+ "num_tokens": 431693370.0,
2874
+ "step": 3580
2875
+ },
2876
+ {
2877
+ "epoch": 2.29696,
2878
+ "grad_norm": 0.7817336774071154,
2879
+ "learning_rate": 6.876342759152121e-07,
2880
+ "loss": 1.0162,
2881
+ "num_tokens": 432901215.0,
2882
+ "step": 3590
2883
+ },
2884
+ {
2885
+ "epoch": 2.30336,
2886
+ "grad_norm": 0.7799297164560258,
2887
+ "learning_rate": 6.757825675765862e-07,
2888
+ "loss": 1.0089,
2889
+ "num_tokens": 434107776.0,
2890
+ "step": 3600
2891
+ },
2892
+ {
2893
+ "epoch": 2.30976,
2894
+ "grad_norm": 0.8399066019292479,
2895
+ "learning_rate": 6.640179022093324e-07,
2896
+ "loss": 1.0104,
2897
+ "num_tokens": 435311152.0,
2898
+ "step": 3610
2899
+ },
2900
+ {
2901
+ "epoch": 2.31616,
2902
+ "grad_norm": 0.8297592147597433,
2903
+ "learning_rate": 6.52340841167276e-07,
2904
+ "loss": 1.0114,
2905
+ "num_tokens": 436513739.0,
2906
+ "step": 3620
2907
+ },
2908
+ {
2909
+ "epoch": 2.32256,
2910
+ "grad_norm": 0.7719279126860086,
2911
+ "learning_rate": 6.407519416241779e-07,
2912
+ "loss": 1.0065,
2913
+ "num_tokens": 437726898.0,
2914
+ "step": 3630
2915
+ },
2916
+ {
2917
+ "epoch": 2.32896,
2918
+ "grad_norm": 0.8045844362641281,
2919
+ "learning_rate": 6.292517565471548e-07,
2920
+ "loss": 1.0097,
2921
+ "num_tokens": 438931660.0,
2922
+ "step": 3640
2923
+ },
2924
+ {
2925
+ "epoch": 2.33536,
2926
+ "grad_norm": 0.7982553698914577,
2927
+ "learning_rate": 6.178408346702882e-07,
2928
+ "loss": 1.0082,
2929
+ "num_tokens": 440137185.0,
2930
+ "step": 3650
2931
+ },
2932
+ {
2933
+ "epoch": 2.34176,
2934
+ "grad_norm": 0.7908405728187465,
2935
+ "learning_rate": 6.065197204684484e-07,
2936
+ "loss": 1.0148,
2937
+ "num_tokens": 441339870.0,
2938
+ "step": 3660
2939
+ },
2940
+ {
2941
+ "epoch": 2.34816,
2942
+ "grad_norm": 0.7738211794516375,
2943
+ "learning_rate": 5.95288954131307e-07,
2944
+ "loss": 1.015,
2945
+ "num_tokens": 442548750.0,
2946
+ "step": 3670
2947
+ },
2948
+ {
2949
+ "epoch": 2.35456,
2950
+ "grad_norm": 0.7925014240523639,
2951
+ "learning_rate": 5.841490715375689e-07,
2952
+ "loss": 1.0146,
2953
+ "num_tokens": 443760356.0,
2954
+ "step": 3680
2955
+ },
2956
+ {
2957
+ "epoch": 2.36096,
2958
+ "grad_norm": 0.7744344940621614,
2959
+ "learning_rate": 5.731006042293983e-07,
2960
+ "loss": 1.0195,
2961
+ "num_tokens": 444963192.0,
2962
+ "step": 3690
2963
+ },
2964
+ {
2965
+ "epoch": 2.36736,
2966
+ "grad_norm": 0.809967543772837,
2967
+ "learning_rate": 5.621440793870564e-07,
2968
+ "loss": 1.0138,
2969
+ "num_tokens": 446161734.0,
2970
+ "step": 3700
2971
+ },
2972
+ {
2973
+ "epoch": 2.37376,
2974
+ "grad_norm": 0.7634003235889771,
2975
+ "learning_rate": 5.512800198037477e-07,
2976
+ "loss": 1.0092,
2977
+ "num_tokens": 447367385.0,
2978
+ "step": 3710
2979
+ },
2980
+ {
2981
+ "epoch": 2.38016,
2982
+ "grad_norm": 0.7694302990943018,
2983
+ "learning_rate": 5.405089438606759e-07,
2984
+ "loss": 1.0183,
2985
+ "num_tokens": 448574222.0,
2986
+ "step": 3720
2987
+ },
2988
+ {
2989
+ "epoch": 2.3865600000000002,
2990
+ "grad_norm": 0.7964969360810369,
2991
+ "learning_rate": 5.298313655023083e-07,
2992
+ "loss": 1.0146,
2993
+ "num_tokens": 449787465.0,
2994
+ "step": 3730
2995
+ },
2996
+ {
2997
+ "epoch": 2.39296,
2998
+ "grad_norm": 0.7826022145337301,
2999
+ "learning_rate": 5.192477942118501e-07,
3000
+ "loss": 1.0059,
3001
+ "num_tokens": 450993609.0,
3002
+ "step": 3740
3003
+ },
3004
+ {
3005
+ "epoch": 2.39936,
3006
+ "grad_norm": 0.7939322826576104,
3007
+ "learning_rate": 5.087587349869396e-07,
3008
+ "loss": 1.016,
3009
+ "num_tokens": 452203974.0,
3010
+ "step": 3750
3011
+ },
3012
+ {
3013
+ "epoch": 2.40576,
3014
+ "grad_norm": 0.7880956603422961,
3015
+ "learning_rate": 4.983646883155479e-07,
3016
+ "loss": 0.9871,
3017
+ "num_tokens": 453406872.0,
3018
+ "step": 3760
3019
+ },
3020
+ {
3021
+ "epoch": 2.41216,
3022
+ "grad_norm": 0.7870741062813569,
3023
+ "learning_rate": 4.880661501520977e-07,
3024
+ "loss": 1.0146,
3025
+ "num_tokens": 454612112.0,
3026
+ "step": 3770
3027
+ },
3028
+ {
3029
+ "epoch": 2.41856,
3030
+ "grad_norm": 0.7757670556350029,
3031
+ "learning_rate": 4.778636118938052e-07,
3032
+ "loss": 1.0043,
3033
+ "num_tokens": 455821550.0,
3034
+ "step": 3780
3035
+ },
3036
+ {
3037
+ "epoch": 2.42496,
3038
+ "grad_norm": 0.764980277323769,
3039
+ "learning_rate": 4.677575603572235e-07,
3040
+ "loss": 1.0037,
3041
+ "num_tokens": 457034119.0,
3042
+ "step": 3790
3043
+ },
3044
+ {
3045
+ "epoch": 2.43136,
3046
+ "grad_norm": 0.7689487131773513,
3047
+ "learning_rate": 4.5774847775501977e-07,
3048
+ "loss": 1.0215,
3049
+ "num_tokens": 458243443.0,
3050
+ "step": 3800
3051
+ },
3052
+ {
3053
+ "epoch": 2.43776,
3054
+ "grad_norm": 0.7835819207262276,
3055
+ "learning_rate": 4.4783684167296645e-07,
3056
+ "loss": 1.0107,
3057
+ "num_tokens": 459449656.0,
3058
+ "step": 3810
3059
+ },
3060
+ {
3061
+ "epoch": 2.44416,
3062
+ "grad_norm": 0.7439227301838608,
3063
+ "learning_rate": 4.38023125047152e-07,
3064
+ "loss": 1.0163,
3065
+ "num_tokens": 460660657.0,
3066
+ "step": 3820
3067
+ },
3068
+ {
3069
+ "epoch": 2.45056,
3070
+ "grad_norm": 0.8141456247124772,
3071
+ "learning_rate": 4.283077961414125e-07,
3072
+ "loss": 1.0073,
3073
+ "num_tokens": 461868305.0,
3074
+ "step": 3830
3075
+ },
3076
+ {
3077
+ "epoch": 2.45696,
3078
+ "grad_norm": 0.7873824030524625,
3079
+ "learning_rate": 4.186913185249936e-07,
3080
+ "loss": 1.0161,
3081
+ "num_tokens": 463067022.0,
3082
+ "step": 3840
3083
+ },
3084
+ {
3085
+ "epoch": 2.4633599999999998,
3086
+ "grad_norm": 0.7651257037667265,
3087
+ "learning_rate": 4.091741510504249e-07,
3088
+ "loss": 1.0054,
3089
+ "num_tokens": 464277276.0,
3090
+ "step": 3850
3091
+ },
3092
+ {
3093
+ "epoch": 2.46976,
3094
+ "grad_norm": 0.7817592356120844,
3095
+ "learning_rate": 3.9975674783163e-07,
3096
+ "loss": 1.0131,
3097
+ "num_tokens": 465486770.0,
3098
+ "step": 3860
3099
+ },
3100
+ {
3101
+ "epoch": 2.47616,
3102
+ "grad_norm": 0.7600628098450863,
3103
+ "learning_rate": 3.904395582222578e-07,
3104
+ "loss": 1.0,
3105
+ "num_tokens": 466688564.0,
3106
+ "step": 3870
3107
+ },
3108
+ {
3109
+ "epoch": 2.48256,
3110
+ "grad_norm": 0.7452814104047683,
3111
+ "learning_rate": 3.81223026794241e-07,
3112
+ "loss": 0.9948,
3113
+ "num_tokens": 467893407.0,
3114
+ "step": 3880
3115
+ },
3116
+ {
3117
+ "epoch": 2.48896,
3118
+ "grad_norm": 0.7886078128816824,
3119
+ "learning_rate": 3.721075933165816e-07,
3120
+ "loss": 1.0255,
3121
+ "num_tokens": 469103315.0,
3122
+ "step": 3890
3123
+ },
3124
+ {
3125
+ "epoch": 2.49536,
3126
+ "grad_norm": 0.7883279810476201,
3127
+ "learning_rate": 3.630936927343695e-07,
3128
+ "loss": 0.9955,
3129
+ "num_tokens": 470304536.0,
3130
+ "step": 3900
3131
+ },
3132
+ {
3133
+ "epoch": 2.50176,
3134
+ "grad_norm": 0.7870530493997763,
3135
+ "learning_rate": 3.541817551480292e-07,
3136
+ "loss": 1.0106,
3137
+ "num_tokens": 471516225.0,
3138
+ "step": 3910
3139
+ },
3140
+ {
3141
+ "epoch": 2.50816,
3142
+ "grad_norm": 0.7913988775198784,
3143
+ "learning_rate": 3.4537220579279497e-07,
3144
+ "loss": 1.0123,
3145
+ "num_tokens": 472723848.0,
3146
+ "step": 3920
3147
+ },
3148
+ {
3149
+ "epoch": 2.51456,
3150
+ "grad_norm": 0.788228042670068,
3151
+ "learning_rate": 3.366654650184217e-07,
3152
+ "loss": 1.0076,
3153
+ "num_tokens": 473927605.0,
3154
+ "step": 3930
3155
+ },
3156
+ {
3157
+ "epoch": 2.52096,
3158
+ "grad_norm": 0.7671091431259203,
3159
+ "learning_rate": 3.2806194826913107e-07,
3160
+ "loss": 1.0054,
3161
+ "num_tokens": 475130341.0,
3162
+ "step": 3940
3163
+ },
3164
+ {
3165
+ "epoch": 2.52736,
3166
+ "grad_norm": 0.7769242999032523,
3167
+ "learning_rate": 3.1956206606378186e-07,
3168
+ "loss": 1.0137,
3169
+ "num_tokens": 476337471.0,
3170
+ "step": 3950
3171
+ },
3172
+ {
3173
+ "epoch": 2.53376,
3174
+ "grad_norm": 0.7761725619806417,
3175
+ "learning_rate": 3.1116622397628886e-07,
3176
+ "loss": 1.0139,
3177
+ "num_tokens": 477546278.0,
3178
+ "step": 3960
3179
+ },
3180
+ {
3181
+ "epoch": 2.54016,
3182
+ "grad_norm": 0.8119517968358277,
3183
+ "learning_rate": 3.0287482261626727e-07,
3184
+ "loss": 1.0112,
3185
+ "num_tokens": 478748834.0,
3186
+ "step": 3970
3187
+ },
3188
+ {
3189
+ "epoch": 2.54656,
3190
+ "grad_norm": 0.7768387486408453,
3191
+ "learning_rate": 2.946882576099164e-07,
3192
+ "loss": 1.0176,
3193
+ "num_tokens": 479951666.0,
3194
+ "step": 3980
3195
+ },
3196
+ {
3197
+ "epoch": 2.55296,
3198
+ "grad_norm": 0.8059661577502851,
3199
+ "learning_rate": 2.8660691958114384e-07,
3200
+ "loss": 1.0192,
3201
+ "num_tokens": 481155740.0,
3202
+ "step": 3990
3203
+ },
3204
+ {
3205
+ "epoch": 2.55936,
3206
+ "grad_norm": 0.7923218074076707,
3207
+ "learning_rate": 2.786311941329298e-07,
3208
+ "loss": 1.0228,
3209
+ "num_tokens": 482362569.0,
3210
+ "step": 4000
3211
+ },
3212
+ {
3213
+ "epoch": 2.56576,
3214
+ "grad_norm": 0.7737100130087119,
3215
+ "learning_rate": 2.70761461828922e-07,
3216
+ "loss": 1.0117,
3217
+ "num_tokens": 483577083.0,
3218
+ "step": 4010
3219
+ },
3220
+ {
3221
+ "epoch": 2.5721600000000002,
3222
+ "grad_norm": 0.8198263737858525,
3223
+ "learning_rate": 2.629980981752803e-07,
3224
+ "loss": 1.0027,
3225
+ "num_tokens": 484785169.0,
3226
+ "step": 4020
3227
+ },
3228
+ {
3229
+ "epoch": 2.57856,
3230
+ "grad_norm": 0.7800117950292567,
3231
+ "learning_rate": 2.5534147360276014e-07,
3232
+ "loss": 1.0061,
3233
+ "num_tokens": 485992637.0,
3234
+ "step": 4030
3235
+ },
3236
+ {
3237
+ "epoch": 2.58496,
3238
+ "grad_norm": 0.7806994703813391,
3239
+ "learning_rate": 2.4779195344903447e-07,
3240
+ "loss": 1.0067,
3241
+ "num_tokens": 487200371.0,
3242
+ "step": 4040
3243
+ },
3244
+ {
3245
+ "epoch": 2.59136,
3246
+ "grad_norm": 0.7967832568550222,
3247
+ "learning_rate": 2.4034989794126494e-07,
3248
+ "loss": 1.005,
3249
+ "num_tokens": 488411438.0,
3250
+ "step": 4050
3251
+ },
3252
+ {
3253
+ "epoch": 2.59776,
3254
+ "grad_norm": 0.7613054637393943,
3255
+ "learning_rate": 2.3301566217891148e-07,
3256
+ "loss": 1.0057,
3257
+ "num_tokens": 489619089.0,
3258
+ "step": 4060
3259
+ },
3260
+ {
3261
+ "epoch": 2.6041600000000003,
3262
+ "grad_norm": 0.8097260832659626,
3263
+ "learning_rate": 2.257895961167886e-07,
3264
+ "loss": 1.0115,
3265
+ "num_tokens": 490822004.0,
3266
+ "step": 4070
3267
+ },
3268
+ {
3269
+ "epoch": 2.61056,
3270
+ "grad_norm": 0.7724807002861569,
3271
+ "learning_rate": 2.18672044548367e-07,
3272
+ "loss": 1.013,
3273
+ "num_tokens": 492031022.0,
3274
+ "step": 4080
3275
+ },
3276
+ {
3277
+ "epoch": 2.6169599999999997,
3278
+ "grad_norm": 0.769701738678788,
3279
+ "learning_rate": 2.1166334708932367e-07,
3280
+ "loss": 1.0097,
3281
+ "num_tokens": 493240890.0,
3282
+ "step": 4090
3283
+ },
3284
+ {
3285
+ "epoch": 2.62336,
3286
+ "grad_norm": 0.76114972582814,
3287
+ "learning_rate": 2.0476383816133594e-07,
3288
+ "loss": 1.0042,
3289
+ "num_tokens": 494453799.0,
3290
+ "step": 4100
3291
+ },
3292
+ {
3293
+ "epoch": 2.62976,
3294
+ "grad_norm": 0.8083057947332605,
3295
+ "learning_rate": 1.9797384697612277e-07,
3296
+ "loss": 1.0044,
3297
+ "num_tokens": 495667359.0,
3298
+ "step": 4110
3299
+ },
3300
+ {
3301
+ "epoch": 2.63616,
3302
+ "grad_norm": 0.7707158865091736,
3303
+ "learning_rate": 1.912936975197388e-07,
3304
+ "loss": 1.0073,
3305
+ "num_tokens": 496881814.0,
3306
+ "step": 4120
3307
+ },
3308
+ {
3309
+ "epoch": 2.64256,
3310
+ "grad_norm": 0.7676478517895791,
3311
+ "learning_rate": 1.8472370853711397e-07,
3312
+ "loss": 1.0187,
3313
+ "num_tokens": 498083665.0,
3314
+ "step": 4130
3315
+ },
3316
+ {
3317
+ "epoch": 2.6489599999999998,
3318
+ "grad_norm": 0.7728314364028435,
3319
+ "learning_rate": 1.7826419351684553e-07,
3320
+ "loss": 0.996,
3321
+ "num_tokens": 499285193.0,
3322
+ "step": 4140
3323
+ },
3324
+ {
3325
+ "epoch": 2.65536,
3326
+ "grad_norm": 0.7787493559807903,
3327
+ "learning_rate": 1.7191546067623772e-07,
3328
+ "loss": 0.9928,
3329
+ "num_tokens": 500495522.0,
3330
+ "step": 4150
3331
+ },
3332
+ {
3333
+ "epoch": 2.66176,
3334
+ "grad_norm": 0.7740957124528121,
3335
+ "learning_rate": 1.656778129465983e-07,
3336
+ "loss": 0.9942,
3337
+ "num_tokens": 501704772.0,
3338
+ "step": 4160
3339
+ },
3340
+ {
3341
+ "epoch": 2.66816,
3342
+ "grad_norm": 0.7834164164129861,
3343
+ "learning_rate": 1.5955154795878086e-07,
3344
+ "loss": 1.0018,
3345
+ "num_tokens": 502908159.0,
3346
+ "step": 4170
3347
+ },
3348
+ {
3349
+ "epoch": 2.67456,
3350
+ "grad_norm": 0.7690261436250733,
3351
+ "learning_rate": 1.5353695802898556e-07,
3352
+ "loss": 0.9966,
3353
+ "num_tokens": 504119578.0,
3354
+ "step": 4180
3355
+ },
3356
+ {
3357
+ "epoch": 2.68096,
3358
+ "grad_norm": 0.7500003508328252,
3359
+ "learning_rate": 1.4763433014481105e-07,
3360
+ "loss": 1.0175,
3361
+ "num_tokens": 505329761.0,
3362
+ "step": 4190
3363
+ },
3364
+ {
3365
+ "epoch": 2.68736,
3366
+ "grad_norm": 0.7619674427912766,
3367
+ "learning_rate": 1.4184394595155887e-07,
3368
+ "loss": 1.0084,
3369
+ "num_tokens": 506541089.0,
3370
+ "step": 4200
3371
+ },
3372
+ {
3373
+ "epoch": 2.69376,
3374
+ "grad_norm": 0.7905928509034632,
3375
+ "learning_rate": 1.3616608173879636e-07,
3376
+ "loss": 1.0077,
3377
+ "num_tokens": 507747398.0,
3378
+ "step": 4210
3379
+ },
3380
+ {
3381
+ "epoch": 2.70016,
3382
+ "grad_norm": 0.7768455409603942,
3383
+ "learning_rate": 1.3060100842717388e-07,
3384
+ "loss": 1.0211,
3385
+ "num_tokens": 508948926.0,
3386
+ "step": 4220
3387
+ },
3388
+ {
3389
+ "epoch": 2.70656,
3390
+ "grad_norm": 0.7650832573151034,
3391
+ "learning_rate": 1.2514899155549625e-07,
3392
+ "loss": 1.0033,
3393
+ "num_tokens": 510157051.0,
3394
+ "step": 4230
3395
+ },
3396
+ {
3397
+ "epoch": 2.71296,
3398
+ "grad_norm": 0.7847880941915708,
3399
+ "learning_rate": 1.1981029126805293e-07,
3400
+ "loss": 1.0025,
3401
+ "num_tokens": 511359623.0,
3402
+ "step": 4240
3403
+ },
3404
+ {
3405
+ "epoch": 2.71936,
3406
+ "grad_norm": 0.8047407028430222,
3407
+ "learning_rate": 1.1458516230220651e-07,
3408
+ "loss": 1.0056,
3409
+ "num_tokens": 512562364.0,
3410
+ "step": 4250
3411
+ },
3412
+ {
3413
+ "epoch": 2.72576,
3414
+ "grad_norm": 0.7894872635799464,
3415
+ "learning_rate": 1.0947385397623522e-07,
3416
+ "loss": 1.0062,
3417
+ "num_tokens": 513767195.0,
3418
+ "step": 4260
3419
+ },
3420
+ {
3421
+ "epoch": 2.73216,
3422
+ "grad_norm": 0.7754271372790722,
3423
+ "learning_rate": 1.0447661017743971e-07,
3424
+ "loss": 0.997,
3425
+ "num_tokens": 514974517.0,
3426
+ "step": 4270
3427
+ },
3428
+ {
3429
+ "epoch": 2.73856,
3430
+ "grad_norm": 0.7746425365371328,
3431
+ "learning_rate": 9.959366935050397e-08,
3432
+ "loss": 0.9987,
3433
+ "num_tokens": 516179935.0,
3434
+ "step": 4280
3435
+ },
3436
+ {
3437
+ "epoch": 2.74496,
3438
+ "grad_norm": 0.7523512554064233,
3439
+ "learning_rate": 9.482526448611807e-08,
3440
+ "loss": 1.0042,
3441
+ "num_tokens": 517387907.0,
3442
+ "step": 4290
3443
+ },
3444
+ {
3445
+ "epoch": 2.75136,
3446
+ "grad_norm": 0.7805940920378595,
3447
+ "learning_rate": 9.017162310986067e-08,
3448
+ "loss": 1.002,
3449
+ "num_tokens": 518595813.0,
3450
+ "step": 4300
3451
+ },
3452
+ {
3453
+ "epoch": 2.75776,
3454
+ "grad_norm": 0.8110259911998368,
3455
+ "learning_rate": 8.563296727134435e-08,
3456
+ "loss": 1.0066,
3457
+ "num_tokens": 519800375.0,
3458
+ "step": 4310
3459
+ },
3460
+ {
3461
+ "epoch": 2.76416,
3462
+ "grad_norm": 0.772256949618178,
3463
+ "learning_rate": 8.120951353361884e-08,
3464
+ "loss": 1.0045,
3465
+ "num_tokens": 521008297.0,
3466
+ "step": 4320
3467
+ },
3468
+ {
3469
+ "epoch": 2.77056,
3470
+ "grad_norm": 0.7629770251408482,
3471
+ "learning_rate": 7.690147296283757e-08,
3472
+ "loss": 1.0007,
3473
+ "num_tokens": 522217337.0,
3474
+ "step": 4330
3475
+ },
3476
+ {
3477
+ "epoch": 2.77696,
3478
+ "grad_norm": 0.750000751925906,
3479
+ "learning_rate": 7.270905111818744e-08,
3480
+ "loss": 1.0044,
3481
+ "num_tokens": 523427534.0,
3482
+ "step": 4340
3483
+ },
3484
+ {
3485
+ "epoch": 2.78336,
3486
+ "grad_norm": 0.7695523347419888,
3487
+ "learning_rate": 6.863244804208053e-08,
3488
+ "loss": 1.0185,
3489
+ "num_tokens": 524629610.0,
3490
+ "step": 4350
3491
+ },
3492
+ {
3493
+ "epoch": 2.7897600000000002,
3494
+ "grad_norm": 0.7594496702512009,
3495
+ "learning_rate": 6.467185825060728e-08,
3496
+ "loss": 1.0132,
3497
+ "num_tokens": 525838628.0,
3498
+ "step": 4360
3499
+ },
3500
+ {
3501
+ "epoch": 2.79616,
3502
+ "grad_norm": 0.774231464389687,
3503
+ "learning_rate": 6.082747072425844e-08,
3504
+ "loss": 0.9923,
3505
+ "num_tokens": 527047256.0,
3506
+ "step": 4370
3507
+ },
3508
+ {
3509
+ "epoch": 2.80256,
3510
+ "grad_norm": 0.7878028776389799,
3511
+ "learning_rate": 5.709946889890461e-08,
3512
+ "loss": 0.9989,
3513
+ "num_tokens": 528251412.0,
3514
+ "step": 4380
3515
+ },
3516
+ {
3517
+ "epoch": 2.80896,
3518
+ "grad_norm": 0.7680845271371904,
3519
+ "learning_rate": 5.348803065704483e-08,
3520
+ "loss": 0.9971,
3521
+ "num_tokens": 529460583.0,
3522
+ "step": 4390
3523
+ },
3524
+ {
3525
+ "epoch": 2.81536,
3526
+ "grad_norm": 0.7710477876974481,
3527
+ "learning_rate": 4.999332831931936e-08,
3528
+ "loss": 1.0097,
3529
+ "num_tokens": 530666949.0,
3530
+ "step": 4400
3531
+ },
3532
+ {
3533
+ "epoch": 2.8217600000000003,
3534
+ "grad_norm": 0.7641864260094089,
3535
+ "learning_rate": 4.6615528636286545e-08,
3536
+ "loss": 1.0083,
3537
+ "num_tokens": 531877350.0,
3538
+ "step": 4410
3539
+ },
3540
+ {
3541
+ "epoch": 2.82816,
3542
+ "grad_norm": 0.7798848074760067,
3543
+ "learning_rate": 4.3354792780467004e-08,
3544
+ "loss": 1.0145,
3545
+ "num_tokens": 533089968.0,
3546
+ "step": 4420
3547
+ },
3548
+ {
3549
+ "epoch": 2.8345599999999997,
3550
+ "grad_norm": 0.7987639919755114,
3551
+ "learning_rate": 4.021127633865196e-08,
3552
+ "loss": 1.0061,
3553
+ "num_tokens": 534295222.0,
3554
+ "step": 4430
3555
+ },
3556
+ {
3557
+ "epoch": 2.84096,
3558
+ "grad_norm": 0.7471470388574258,
3559
+ "learning_rate": 3.718512930448115e-08,
3560
+ "loss": 0.9897,
3561
+ "num_tokens": 535501172.0,
3562
+ "step": 4440
3563
+ },
3564
+ {
3565
+ "epoch": 2.84736,
3566
+ "grad_norm": 0.7784643844597081,
3567
+ "learning_rate": 3.4276496071284084e-08,
3568
+ "loss": 1.0126,
3569
+ "num_tokens": 536697925.0,
3570
+ "step": 4450
3571
+ },
3572
+ {
3573
+ "epoch": 2.85376,
3574
+ "grad_norm": 0.7972370799678196,
3575
+ "learning_rate": 3.148551542519196e-08,
3576
+ "loss": 1.0051,
3577
+ "num_tokens": 537893496.0,
3578
+ "step": 4460
3579
+ },
3580
+ {
3581
+ "epoch": 2.86016,
3582
+ "grad_norm": 0.7691284457736113,
3583
+ "learning_rate": 2.8812320538514348e-08,
3584
+ "loss": 1.0098,
3585
+ "num_tokens": 539102796.0,
3586
+ "step": 4470
3587
+ },
3588
+ {
3589
+ "epoch": 2.8665599999999998,
3590
+ "grad_norm": 0.7982125519739797,
3591
+ "learning_rate": 2.6257038963385106e-08,
3592
+ "loss": 1.0136,
3593
+ "num_tokens": 540316296.0,
3594
+ "step": 4480
3595
+ },
3596
+ {
3597
+ "epoch": 2.87296,
3598
+ "grad_norm": 0.7728520058232545,
3599
+ "learning_rate": 2.3819792625675297e-08,
3600
+ "loss": 1.0149,
3601
+ "num_tokens": 541533670.0,
3602
+ "step": 4490
3603
+ },
3604
+ {
3605
+ "epoch": 2.87936,
3606
+ "grad_norm": 0.7681197599600511,
3607
+ "learning_rate": 2.1500697819178406e-08,
3608
+ "loss": 1.0027,
3609
+ "num_tokens": 542738043.0,
3610
+ "step": 4500
3611
+ },
3612
+ {
3613
+ "epoch": 2.88576,
3614
+ "grad_norm": 0.7510549175746628,
3615
+ "learning_rate": 1.9299865200057556e-08,
3616
+ "loss": 1.0059,
3617
+ "num_tokens": 543947829.0,
3618
+ "step": 4510
3619
+ },
3620
+ {
3621
+ "epoch": 2.89216,
3622
+ "grad_norm": 0.766881311747473,
3623
+ "learning_rate": 1.721739978156778e-08,
3624
+ "loss": 1.0051,
3625
+ "num_tokens": 545163765.0,
3626
+ "step": 4520
3627
+ },
3628
+ {
3629
+ "epoch": 2.89856,
3630
+ "grad_norm": 0.8067610998392601,
3631
+ "learning_rate": 1.5253400929045036e-08,
3632
+ "loss": 0.9998,
3633
+ "num_tokens": 546371420.0,
3634
+ "step": 4530
3635
+ },
3636
+ {
3637
+ "epoch": 2.90496,
3638
+ "grad_norm": 0.7674069412891232,
3639
+ "learning_rate": 1.3407962355164728e-08,
3640
+ "loss": 1.0164,
3641
+ "num_tokens": 547577921.0,
3642
+ "step": 4540
3643
+ },
3644
+ {
3645
+ "epoch": 2.91136,
3646
+ "grad_norm": 0.775004069541473,
3647
+ "learning_rate": 1.1681172115469986e-08,
3648
+ "loss": 1.0034,
3649
+ "num_tokens": 548783680.0,
3650
+ "step": 4550
3651
+ },
3652
+ {
3653
+ "epoch": 2.91776,
3654
+ "grad_norm": 0.7833232261400477,
3655
+ "learning_rate": 1.007311260417032e-08,
3656
+ "loss": 0.9956,
3657
+ "num_tokens": 549988634.0,
3658
+ "step": 4560
3659
+ },
3660
+ {
3661
+ "epoch": 2.92416,
3662
+ "grad_norm": 0.7826676344415344,
3663
+ "learning_rate": 8.583860550210043e-09,
3664
+ "loss": 1.0098,
3665
+ "num_tokens": 551189799.0,
3666
+ "step": 4570
3667
+ },
3668
+ {
3669
+ "epoch": 2.93056,
3670
+ "grad_norm": 0.744986247926951,
3671
+ "learning_rate": 7.213487013607856e-09,
3672
+ "loss": 1.0035,
3673
+ "num_tokens": 552397598.0,
3674
+ "step": 4580
3675
+ },
3676
+ {
3677
+ "epoch": 2.93696,
3678
+ "grad_norm": 0.7521106648563647,
3679
+ "learning_rate": 5.96205738206429e-09,
3680
+ "loss": 1.0043,
3681
+ "num_tokens": 553610771.0,
3682
+ "step": 4590
3683
+ },
3684
+ {
3685
+ "epoch": 2.94336,
3686
+ "grad_norm": 0.8150061917429959,
3687
+ "learning_rate": 4.829631367844201e-09,
3688
+ "loss": 1.0,
3689
+ "num_tokens": 554824637.0,
3690
+ "step": 4600
3691
+ },
3692
+ {
3693
+ "epoch": 2.94976,
3694
+ "grad_norm": 0.772723595238506,
3695
+ "learning_rate": 3.816263004925991e-09,
3696
+ "loss": 1.0082,
3697
+ "num_tokens": 556030923.0,
3698
+ "step": 4610
3699
+ },
3700
+ {
3701
+ "epoch": 2.95616,
3702
+ "grad_norm": 0.768451723737756,
3703
+ "learning_rate": 2.922000646423118e-09,
3704
+ "loss": 0.9922,
3705
+ "num_tokens": 557231653.0,
3706
+ "step": 4620
3707
+ },
3708
+ {
3709
+ "epoch": 2.96256,
3710
+ "grad_norm": 0.7993486350591127,
3711
+ "learning_rate": 2.1468869622781608e-09,
3712
+ "loss": 1.0019,
3713
+ "num_tokens": 558442813.0,
3714
+ "step": 4630
3715
+ },
3716
+ {
3717
+ "epoch": 2.96896,
3718
+ "grad_norm": 1.1239957345324176,
3719
+ "learning_rate": 1.4909589372266719e-09,
3720
+ "loss": 1.001,
3721
+ "num_tokens": 559650373.0,
3722
+ "step": 4640
3723
+ },
3724
+ {
3725
+ "epoch": 2.9753600000000002,
3726
+ "grad_norm": 0.7957668006721109,
3727
+ "learning_rate": 9.542478690305335e-10,
3728
+ "loss": 1.0067,
3729
+ "num_tokens": 560855666.0,
3730
+ "step": 4650
3731
+ },
3732
+ {
3733
+ "epoch": 2.98176,
3734
+ "grad_norm": 0.8079239433679425,
3735
+ "learning_rate": 5.367793669874832e-10,
3736
+ "loss": 0.9969,
3737
+ "num_tokens": 562060878.0,
3738
+ "step": 4660
3739
+ },
3740
+ {
3741
+ "epoch": 2.98816,
3742
+ "grad_norm": 0.7720639449232606,
3743
+ "learning_rate": 2.385733507062615e-10,
3744
+ "loss": 1.0052,
3745
+ "num_tokens": 563260411.0,
3746
+ "step": 4670
3747
+ },
3748
+ {
3749
+ "epoch": 2.99456,
3750
+ "grad_norm": 0.7561377897632978,
3751
+ "learning_rate": 5.964404915903555e-11,
3752
+ "loss": 0.9991,
3753
+ "num_tokens": 564468049.0,
3754
+ "step": 4680
3755
+ },
3756
+ {
3757
+ "epoch": 3.0,
3758
+ "num_tokens": 565489014.0,
3759
+ "step": 4689,
3760
+ "total_flos": 722584728633344.0,
3761
+ "train_loss": 1.0774097926684294,
3762
+ "train_runtime": 15585.9875,
3763
+ "train_samples_per_second": 19.248,
3764
+ "train_steps_per_second": 0.301
3765
+ }
3766
+ ],
3767
+ "logging_steps": 10,
3768
+ "max_steps": 4689,
3769
+ "num_input_tokens_seen": 0,
3770
+ "num_train_epochs": 3,
3771
+ "save_steps": 500,
3772
+ "stateful_callbacks": {
3773
+ "TrainerControl": {
3774
+ "args": {
3775
+ "should_epoch_stop": false,
3776
+ "should_evaluate": false,
3777
+ "should_log": false,
3778
+ "should_save": false,
3779
+ "should_training_stop": false
3780
+ },
3781
+ "attributes": {}
3782
+ }
3783
+ },
3784
+ "total_flos": 722584728633344.0,
3785
+ "train_batch_size": 8,
3786
+ "trial_name": null,
3787
+ "trial_params": null
3788
+ }
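The JSON diff above is the tail of the trainer state file (conventionally saved as `trainer_state.json` by the `transformers` `Trainer`): each periodic entry records `epoch`, `grad_norm`, `learning_rate`, `loss`, `num_tokens`, and `step` every 10 steps (`logging_steps: 10`), and the final record holds the run summary. A minimal sketch for plotting the loss and learning-rate curves from such a file — the local filename, the standard `log_history` key, and the matplotlib dependency are assumptions, not something this diff shows:

```python
# Minimal sketch (assumptions: the JSON above is saved locally as
# trainer_state.json, the entries live under the standard "log_history"
# key, and matplotlib is installed).
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the periodic logging entries; the final summary record
# carries "train_loss"/"train_runtime" instead of a per-step "loss".
logs = [entry for entry in state["log_history"] if "loss" in entry]

steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]
lrs = [entry["learning_rate"] for entry in logs]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("train loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("step")
fig.tight_layout()
fig.savefig("training_curves.png")
```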
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:862deab6f828df130b5010e59a587d1aaf586646115c35ab7ba680c5b9433c46
3
+ size 7544
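The three lines above are a Git LFS pointer rather than the binary itself: the spec version, the sha256 object id, and the file size in bytes. A short sketch for checking a downloaded `training_args.bin` against this pointer, using only the standard library (the local path is an assumption):

```python
# Sketch: verify a downloaded artifact against the Git LFS pointer above.
# Assumption: training_args.bin has been fetched locally (the pointer is
# what git stores; the real 7544-byte binary lives in LFS storage).
import hashlib
import os

EXPECTED_OID = "862deab6f828df130b5010e59a587d1aaf586646115c35ab7ba680c5b9433c46"
EXPECTED_SIZE = 7544

path = "training_args.bin"
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
assert digest.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("training_args.bin matches its LFS pointer")
```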
vocab.json ADDED
The diff for this file is too large to render.
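Although the `vocab.json` diff is not rendered here, the file can be inspected once downloaded. For a BPE tokenizer like the Qwen family's it is a plain JSON object mapping token strings to integer ids — a hedged sketch, assuming a local copy:

```python
# Sketch: peek at the vocab.json that was too large to render above.
# Assumptions: the file has been downloaded locally and follows the
# usual BPE layout of {token_string: token_id}.
import json

with open("vocab.json", encoding="utf-8") as f:
    vocab = json.load(f)

print(f"vocab size: {len(vocab)}")
for token, token_id in list(vocab.items())[:5]:
    print(repr(token), "->", token_id)
```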