Commit b3e23bb (verified) by LPY · 1 Parent(s): d665527

Delete pretrain

Files changed (32)
  1. pretrain/one_image_layer1_pretrain_3824/config.json +0 -54
  2. pretrain/one_image_layer1_pretrain_3824/generation_config.json +0 -7
  3. pretrain/one_image_layer1_pretrain_3824/model-00001-of-00002.safetensors +0 -3
  4. pretrain/one_image_layer1_pretrain_3824/model-00002-of-00002.safetensors +0 -3
  5. pretrain/one_image_layer1_pretrain_3824/model.safetensors.index.json +0 -620
  6. pretrain/one_image_layer1_pretrain_3824/rng_state_0.pth +0 -3
  7. pretrain/one_image_layer1_pretrain_3824/rng_state_1.pth +0 -3
  8. pretrain/one_image_layer1_pretrain_3824/rng_state_2.pth +0 -3
  9. pretrain/one_image_layer1_pretrain_3824/rng_state_3.pth +0 -3
  10. pretrain/one_image_layer1_pretrain_3824/rng_state_4.pth +0 -3
  11. pretrain/one_image_layer1_pretrain_3824/rng_state_5.pth +0 -3
  12. pretrain/one_image_layer1_pretrain_3824/rng_state_6.pth +0 -3
  13. pretrain/one_image_layer1_pretrain_3824/rng_state_7.pth +0 -3
  14. pretrain/one_image_layer1_pretrain_3824/scheduler.pt +0 -3
  15. pretrain/one_image_layer1_pretrain_3824/trainer_state.json +0 -2708
  16. pretrain/one_image_layer1_pretrain_3824/training_args.bin +0 -3
  17. pretrain/one_image_layer4_pretrain_3824/config.json +0 -54
  18. pretrain/one_image_layer4_pretrain_3824/generation_config.json +0 -7
  19. pretrain/one_image_layer4_pretrain_3824/model-00001-of-00002.safetensors +0 -3
  20. pretrain/one_image_layer4_pretrain_3824/model-00002-of-00002.safetensors +0 -3
  21. pretrain/one_image_layer4_pretrain_3824/model.safetensors.index.json +0 -620
  22. pretrain/one_image_layer4_pretrain_3824/rng_state_0.pth +0 -3
  23. pretrain/one_image_layer4_pretrain_3824/rng_state_1.pth +0 -3
  24. pretrain/one_image_layer4_pretrain_3824/rng_state_2.pth +0 -3
  25. pretrain/one_image_layer4_pretrain_3824/rng_state_3.pth +0 -3
  26. pretrain/one_image_layer4_pretrain_3824/rng_state_4.pth +0 -3
  27. pretrain/one_image_layer4_pretrain_3824/rng_state_5.pth +0 -3
  28. pretrain/one_image_layer4_pretrain_3824/rng_state_6.pth +0 -3
  29. pretrain/one_image_layer4_pretrain_3824/rng_state_7.pth +0 -3
  30. pretrain/one_image_layer4_pretrain_3824/scheduler.pt +0 -3
  31. pretrain/one_image_layer4_pretrain_3824/trainer_state.json +0 -2708
  32. pretrain/one_image_layer4_pretrain_3824/training_args.bin +0 -3
pretrain/one_image_layer1_pretrain_3824/config.json DELETED
@@ -1,54 +0,0 @@
- {
- "_vocab_size": 257216,
- "architectures": [
- "RoboPoint_Paligemma"
- ],
- "bos_token_id": 2,
- "eos_token_id": 1,
- "hidden_size": 2048,
- "image_token_index": 257152,
- "model_type": "paligemma",
- "pad_token_id": 0,
- "projection_dim": 2048,
- "text_config": {
- "attention_bias": false,
- "attention_dropout": 0.0,
- "head_dim": 256,
- "hidden_act": "gelu_pytorch_tanh",
- "hidden_activation": null,
- "hidden_size": 2048,
- "initializer_range": 0.02,
- "intermediate_size": 16384,
- "max_position_embeddings": 8192,
- "model_type": "gemma",
- "num_attention_heads": 8,
- "num_hidden_layers": 18,
- "num_image_tokens": 256,
- "num_key_value_heads": 1,
- "rms_norm_eps": 1e-06,
- "rope_theta": 10000.0,
- "torch_dtype": "bfloat16",
- "use_cache": true,
- "vocab_size": 257216
- },
- "torch_dtype": "bfloat16",
- "transformers_version": "4.51.3",
- "vision_config": {
- "attention_dropout": 0.0,
- "hidden_act": "gelu_pytorch_tanh",
- "hidden_size": 1152,
- "image_size": 224,
- "intermediate_size": 4304,
- "layer_norm_eps": 1e-06,
- "model_type": "siglip_vision_model",
- "num_attention_heads": 16,
- "num_channels": 3,
- "num_hidden_layers": 27,
- "num_image_tokens": 256,
- "patch_size": 14,
- "projection_dim": 2048,
- "projector_hidden_act": "gelu_fast",
- "torch_dtype": "bfloat16",
- "vision_use_head": false
- }
- }
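
The deleted config.json describes a PaliGemma-style vision-language model: a Gemma text backbone (18 layers, hidden size 2048) paired with a SigLIP vision tower (27 layers, 224x224 input, patch size 14) behind a 2048-dim projector, under the custom architecture name "RoboPoint_Paligemma". A minimal sketch of inspecting such a config with transformers, assuming a local copy of the file survives (the path below is hypothetical, since this commit removes it from the repo):

```python
# Minimal sketch (hypothetical local path): load the removed PaliGemma-style
# config with the standard transformers config class and inspect its shape.
from transformers import PaliGemmaConfig

cfg = PaliGemmaConfig.from_json_file("config.json")  # a saved copy of the deleted file

print(cfg.model_type)                       # "paligemma"
print(cfg.text_config.num_hidden_layers)    # 18  (Gemma text backbone)
print(cfg.vision_config.num_hidden_layers)  # 27  (SigLIP vision tower)
print(cfg.vision_config.image_size)         # 224
```

Note that the custom "architectures" entry means the stock AutoModel classes would not instantiate the weights directly; the repo's own RoboPoint_Paligemma model class would typically be needed for that.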
 
pretrain/one_image_layer1_pretrain_3824/generation_config.json DELETED
@@ -1,7 +0,0 @@
- {
- "_from_model_config": true,
- "bos_token_id": 2,
- "eos_token_id": 1,
- "pad_token_id": 0,
- "transformers_version": "4.51.3"
- }
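
The deleted generation_config.json only pins the special-token ids used at decode time (BOS 2, EOS 1, PAD 0). A minimal in-code equivalent, with the values copied from the file above:

```python
# In-code equivalent of the removed generation defaults
# (token ids taken from the generation_config.json shown above).
from transformers import GenerationConfig

gen_cfg = GenerationConfig(bos_token_id=2, eos_token_id=1, pad_token_id=0)
print(gen_cfg.to_json_string())
```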
 
pretrain/one_image_layer1_pretrain_3824/model-00001-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:4f886d11a781a35e1397460c841bafb8e030d0a6283ca675d8d5f9ffc0459caf
- size 4985048384
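
The .safetensors entries in this commit are Git LFS pointer stubs rather than the weights themselves: three key/value lines (spec version, sha256 oid, byte size) standing in for shards of roughly 5.0 GB and 2.1 GB. A minimal sketch of reading such a pointer, with a hypothetical local filename:

```python
# Minimal sketch: parse a Git LFS pointer stub (the 3-line file stored in the
# repo in place of the multi-gigabyte shard). The filename below is hypothetical.
def read_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

ptr = read_lfs_pointer("model-00001-of-00002.safetensors")
print(ptr["oid"])                    # "sha256:4f88..."
print(int(ptr["size"]) / 1e9, "GB")  # ~4.99 GB
```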
 
pretrain/one_image_layer1_pretrain_3824/model-00002-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:c0b4048db6cb7288573b860367e60c348d23cd0cf4c60e9a744e910f438040a9
- size 2099036524
 
pretrain/one_image_layer1_pretrain_3824/model.safetensors.index.json DELETED
@@ -1,620 +0,0 @@
1
- {
2
- "metadata": {
3
- "total_size": 7083997556
4
- },
5
- "weight_map": {
6
- "module.language_model.model.embed_tokens.weight": "model-00001-of-00002.safetensors",
7
- "module.language_model.model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
8
- "module.language_model.model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
9
- "module.language_model.model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
10
- "module.language_model.model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
11
- "module.language_model.model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
12
- "module.language_model.model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
13
- "module.language_model.model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
14
- "module.language_model.model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
15
- "module.language_model.model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
16
- "module.language_model.model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
17
- "module.language_model.model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
18
- "module.language_model.model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
19
- "module.language_model.model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
20
- "module.language_model.model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
21
- "module.language_model.model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
22
- "module.language_model.model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
23
- "module.language_model.model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
24
- "module.language_model.model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
25
- "module.language_model.model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
26
- "module.language_model.model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
27
- "module.language_model.model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
28
- "module.language_model.model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
29
- "module.language_model.model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
30
- "module.language_model.model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
31
- "module.language_model.model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
32
- "module.language_model.model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
33
- "module.language_model.model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
34
- "module.language_model.model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
35
- "module.language_model.model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
36
- "module.language_model.model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
37
- "module.language_model.model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
38
- "module.language_model.model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
39
- "module.language_model.model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
40
- "module.language_model.model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
41
- "module.language_model.model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
42
- "module.language_model.model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
43
- "module.language_model.model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
44
- "module.language_model.model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
45
- "module.language_model.model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
46
- "module.language_model.model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
47
- "module.language_model.model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
48
- "module.language_model.model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
49
- "module.language_model.model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
50
- "module.language_model.model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
51
- "module.language_model.model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
52
- "module.language_model.model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
53
- "module.language_model.model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
54
- "module.language_model.model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
55
- "module.language_model.model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
56
- "module.language_model.model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
57
- "module.language_model.model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
58
- "module.language_model.model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
59
- "module.language_model.model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
60
- "module.language_model.model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
61
- "module.language_model.model.layers.14.input_layernorm.weight": "model-00002-of-00002.safetensors",
62
- "module.language_model.model.layers.14.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
63
- "module.language_model.model.layers.14.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
64
- "module.language_model.model.layers.14.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
65
- "module.language_model.model.layers.14.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
66
- "module.language_model.model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
67
- "module.language_model.model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
68
- "module.language_model.model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
69
- "module.language_model.model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
70
- "module.language_model.model.layers.15.input_layernorm.weight": "model-00002-of-00002.safetensors",
71
- "module.language_model.model.layers.15.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
72
- "module.language_model.model.layers.15.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
73
- "module.language_model.model.layers.15.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
74
- "module.language_model.model.layers.15.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
75
- "module.language_model.model.layers.15.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
76
- "module.language_model.model.layers.15.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
77
- "module.language_model.model.layers.15.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
78
- "module.language_model.model.layers.15.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
79
- "module.language_model.model.layers.16.input_layernorm.weight": "model-00002-of-00002.safetensors",
80
- "module.language_model.model.layers.16.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
81
- "module.language_model.model.layers.16.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
82
- "module.language_model.model.layers.16.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
83
- "module.language_model.model.layers.16.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
84
- "module.language_model.model.layers.16.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
85
- "module.language_model.model.layers.16.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
86
- "module.language_model.model.layers.16.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
87
- "module.language_model.model.layers.16.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
88
- "module.language_model.model.layers.17.input_layernorm.weight": "model-00002-of-00002.safetensors",
89
- "module.language_model.model.layers.17.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
90
- "module.language_model.model.layers.17.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
91
- "module.language_model.model.layers.17.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
92
- "module.language_model.model.layers.17.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
93
- "module.language_model.model.layers.17.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
94
- "module.language_model.model.layers.17.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
95
- "module.language_model.model.layers.17.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
96
- "module.language_model.model.layers.17.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
97
- "module.language_model.model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
98
- "module.language_model.model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
99
- "module.language_model.model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
100
- "module.language_model.model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
101
- "module.language_model.model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
102
- "module.language_model.model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
103
- "module.language_model.model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
104
- "module.language_model.model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
105
- "module.language_model.model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
106
- "module.language_model.model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
107
- "module.language_model.model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
108
- "module.language_model.model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
109
- "module.language_model.model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
110
- "module.language_model.model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
111
- "module.language_model.model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
112
- "module.language_model.model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
113
- "module.language_model.model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
114
- "module.language_model.model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
115
- "module.language_model.model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
116
- "module.language_model.model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
117
- "module.language_model.model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
118
- "module.language_model.model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
119
- "module.language_model.model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
120
- "module.language_model.model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
121
- "module.language_model.model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
122
- "module.language_model.model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
123
- "module.language_model.model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
124
- "module.language_model.model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
125
- "module.language_model.model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
126
- "module.language_model.model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
127
- "module.language_model.model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
128
- "module.language_model.model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
129
- "module.language_model.model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
130
- "module.language_model.model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
131
- "module.language_model.model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
132
- "module.language_model.model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
133
- "module.language_model.model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
134
- "module.language_model.model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
135
- "module.language_model.model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
136
- "module.language_model.model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
137
- "module.language_model.model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
138
- "module.language_model.model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
139
- "module.language_model.model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
140
- "module.language_model.model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
141
- "module.language_model.model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
142
- "module.language_model.model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
143
- "module.language_model.model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
144
- "module.language_model.model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
145
- "module.language_model.model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
146
- "module.language_model.model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
147
- "module.language_model.model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
148
- "module.language_model.model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
149
- "module.language_model.model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
150
- "module.language_model.model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
151
- "module.language_model.model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
152
- "module.language_model.model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
153
- "module.language_model.model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
154
- "module.language_model.model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
155
- "module.language_model.model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
156
- "module.language_model.model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
157
- "module.language_model.model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
158
- "module.language_model.model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
159
- "module.language_model.model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
160
- "module.language_model.model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
161
- "module.language_model.model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
162
- "module.language_model.model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
163
- "module.language_model.model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
164
- "module.language_model.model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
165
- "module.language_model.model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
166
- "module.language_model.model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
167
- "module.language_model.model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
168
- "module.language_model.model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
169
- "module.language_model.model.norm.weight": "model-00002-of-00002.safetensors",
170
- "module.multi_modal_projector.linear.bias": "model-00001-of-00002.safetensors",
171
- "module.multi_modal_projector.linear.weight": "model-00001-of-00002.safetensors",
172
- "module.up0.net_mask.0.bias": "model-00002-of-00002.safetensors",
173
- "module.up0.net_mask.0.weight": "model-00002-of-00002.safetensors",
174
- "module.up0.net_mask.2.bias": "model-00002-of-00002.safetensors",
175
- "module.up0.net_mask.2.weight": "model-00002-of-00002.safetensors",
176
- "module.up0.net_out.0.bias": "model-00002-of-00002.safetensors",
177
- "module.up0.net_out.0.weight": "model-00002-of-00002.safetensors",
178
- "module.up0.net_out.2.bias": "model-00002-of-00002.safetensors",
179
- "module.up0.net_out.2.weight": "model-00002-of-00002.safetensors",
180
- "module.up0.net_out.4.bias": "model-00002-of-00002.safetensors",
181
- "module.up0.net_out.4.weight": "model-00002-of-00002.safetensors",
182
- "module.vision_tower.vision_model.embeddings.patch_embedding.bias": "model-00001-of-00002.safetensors",
183
- "module.vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00001-of-00002.safetensors",
184
- "module.vision_tower.vision_model.embeddings.position_embedding.weight": "model-00001-of-00002.safetensors",
185
- "module.vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00001-of-00002.safetensors",
186
- "module.vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00001-of-00002.safetensors",
187
- "module.vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00001-of-00002.safetensors",
188
- "module.vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00001-of-00002.safetensors",
189
- "module.vision_tower.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00001-of-00002.safetensors",
190
- "module.vision_tower.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00001-of-00002.safetensors",
191
- "module.vision_tower.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00001-of-00002.safetensors",
192
- "module.vision_tower.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00001-of-00002.safetensors",
193
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
194
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
195
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
196
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
197
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
198
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
199
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
200
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
201
- "module.vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00001-of-00002.safetensors",
202
- "module.vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00001-of-00002.safetensors",
203
- "module.vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00001-of-00002.safetensors",
204
- "module.vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00001-of-00002.safetensors",
205
- "module.vision_tower.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00001-of-00002.safetensors",
206
- "module.vision_tower.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00001-of-00002.safetensors",
207
- "module.vision_tower.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00001-of-00002.safetensors",
208
- "module.vision_tower.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00001-of-00002.safetensors",
209
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
210
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
211
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
212
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
213
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
214
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
215
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
216
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
217
- "module.vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00001-of-00002.safetensors",
218
- "module.vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00001-of-00002.safetensors",
219
- "module.vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00001-of-00002.safetensors",
220
- "module.vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00001-of-00002.safetensors",
221
- "module.vision_tower.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00001-of-00002.safetensors",
222
- "module.vision_tower.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00001-of-00002.safetensors",
223
- "module.vision_tower.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00001-of-00002.safetensors",
224
- "module.vision_tower.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00001-of-00002.safetensors",
225
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
226
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
227
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
228
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
229
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
230
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
231
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
232
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
233
- "module.vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00001-of-00002.safetensors",
234
- "module.vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00001-of-00002.safetensors",
235
- "module.vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00001-of-00002.safetensors",
236
- "module.vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00001-of-00002.safetensors",
237
- "module.vision_tower.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00001-of-00002.safetensors",
238
- "module.vision_tower.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00001-of-00002.safetensors",
239
- "module.vision_tower.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00001-of-00002.safetensors",
240
- "module.vision_tower.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00001-of-00002.safetensors",
241
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
242
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
243
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
244
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
245
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
246
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
247
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
248
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
249
- "module.vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00001-of-00002.safetensors",
250
- "module.vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00001-of-00002.safetensors",
251
- "module.vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00001-of-00002.safetensors",
252
- "module.vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00001-of-00002.safetensors",
253
- "module.vision_tower.vision_model.encoder.layers.12.mlp.fc1.bias": "model-00001-of-00002.safetensors",
254
- "module.vision_tower.vision_model.encoder.layers.12.mlp.fc1.weight": "model-00001-of-00002.safetensors",
255
- "module.vision_tower.vision_model.encoder.layers.12.mlp.fc2.bias": "model-00001-of-00002.safetensors",
256
- "module.vision_tower.vision_model.encoder.layers.12.mlp.fc2.weight": "model-00001-of-00002.safetensors",
257
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
258
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
259
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
260
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
261
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
262
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
263
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
264
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
265
- "module.vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00001-of-00002.safetensors",
266
- "module.vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00001-of-00002.safetensors",
267
- "module.vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00001-of-00002.safetensors",
268
- "module.vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00001-of-00002.safetensors",
269
- "module.vision_tower.vision_model.encoder.layers.13.mlp.fc1.bias": "model-00001-of-00002.safetensors",
270
- "module.vision_tower.vision_model.encoder.layers.13.mlp.fc1.weight": "model-00001-of-00002.safetensors",
271
- "module.vision_tower.vision_model.encoder.layers.13.mlp.fc2.bias": "model-00001-of-00002.safetensors",
272
- "module.vision_tower.vision_model.encoder.layers.13.mlp.fc2.weight": "model-00001-of-00002.safetensors",
273
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
274
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
275
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
276
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
277
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
278
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
279
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
280
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
281
- "module.vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00001-of-00002.safetensors",
282
- "module.vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00001-of-00002.safetensors",
283
- "module.vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00001-of-00002.safetensors",
284
- "module.vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00001-of-00002.safetensors",
285
- "module.vision_tower.vision_model.encoder.layers.14.mlp.fc1.bias": "model-00001-of-00002.safetensors",
286
- "module.vision_tower.vision_model.encoder.layers.14.mlp.fc1.weight": "model-00001-of-00002.safetensors",
287
- "module.vision_tower.vision_model.encoder.layers.14.mlp.fc2.bias": "model-00001-of-00002.safetensors",
288
- "module.vision_tower.vision_model.encoder.layers.14.mlp.fc2.weight": "model-00001-of-00002.safetensors",
289
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
290
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
291
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
292
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
293
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
294
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
295
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
296
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
297
- "module.vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00001-of-00002.safetensors",
298
- "module.vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00001-of-00002.safetensors",
299
- "module.vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00001-of-00002.safetensors",
300
- "module.vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00001-of-00002.safetensors",
301
- "module.vision_tower.vision_model.encoder.layers.15.mlp.fc1.bias": "model-00001-of-00002.safetensors",
302
- "module.vision_tower.vision_model.encoder.layers.15.mlp.fc1.weight": "model-00001-of-00002.safetensors",
303
- "module.vision_tower.vision_model.encoder.layers.15.mlp.fc2.bias": "model-00001-of-00002.safetensors",
304
- "module.vision_tower.vision_model.encoder.layers.15.mlp.fc2.weight": "model-00001-of-00002.safetensors",
305
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
306
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
307
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
308
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
309
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
310
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
311
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
312
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
313
- "module.vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00001-of-00002.safetensors",
314
- "module.vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00001-of-00002.safetensors",
315
- "module.vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00001-of-00002.safetensors",
316
- "module.vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00001-of-00002.safetensors",
317
- "module.vision_tower.vision_model.encoder.layers.16.mlp.fc1.bias": "model-00001-of-00002.safetensors",
318
- "module.vision_tower.vision_model.encoder.layers.16.mlp.fc1.weight": "model-00001-of-00002.safetensors",
319
- "module.vision_tower.vision_model.encoder.layers.16.mlp.fc2.bias": "model-00001-of-00002.safetensors",
320
- "module.vision_tower.vision_model.encoder.layers.16.mlp.fc2.weight": "model-00001-of-00002.safetensors",
321
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
322
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
323
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
324
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
325
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
326
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
327
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
328
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
329
- "module.vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00001-of-00002.safetensors",
330
- "module.vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "model-00001-of-00002.safetensors",
331
- "module.vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00001-of-00002.safetensors",
332
- "module.vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00001-of-00002.safetensors",
333
- "module.vision_tower.vision_model.encoder.layers.17.mlp.fc1.bias": "model-00001-of-00002.safetensors",
334
- "module.vision_tower.vision_model.encoder.layers.17.mlp.fc1.weight": "model-00001-of-00002.safetensors",
335
- "module.vision_tower.vision_model.encoder.layers.17.mlp.fc2.bias": "model-00001-of-00002.safetensors",
336
- "module.vision_tower.vision_model.encoder.layers.17.mlp.fc2.weight": "model-00001-of-00002.safetensors",
337
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
338
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
339
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
340
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
341
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
342
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
343
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
344
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
345
- "module.vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00001-of-00002.safetensors",
346
- "module.vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00001-of-00002.safetensors",
347
- "module.vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00001-of-00002.safetensors",
348
- "module.vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00001-of-00002.safetensors",
349
- "module.vision_tower.vision_model.encoder.layers.18.mlp.fc1.bias": "model-00001-of-00002.safetensors",
350
- "module.vision_tower.vision_model.encoder.layers.18.mlp.fc1.weight": "model-00001-of-00002.safetensors",
351
- "module.vision_tower.vision_model.encoder.layers.18.mlp.fc2.bias": "model-00001-of-00002.safetensors",
352
- "module.vision_tower.vision_model.encoder.layers.18.mlp.fc2.weight": "model-00001-of-00002.safetensors",
353
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
354
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
355
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
356
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
357
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
358
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
359
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
360
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
361
- "module.vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00001-of-00002.safetensors",
362
- "module.vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00001-of-00002.safetensors",
363
- "module.vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00001-of-00002.safetensors",
364
- "module.vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00001-of-00002.safetensors",
365
- "module.vision_tower.vision_model.encoder.layers.19.mlp.fc1.bias": "model-00001-of-00002.safetensors",
366
- "module.vision_tower.vision_model.encoder.layers.19.mlp.fc1.weight": "model-00001-of-00002.safetensors",
367
- "module.vision_tower.vision_model.encoder.layers.19.mlp.fc2.bias": "model-00001-of-00002.safetensors",
368
- "module.vision_tower.vision_model.encoder.layers.19.mlp.fc2.weight": "model-00001-of-00002.safetensors",
369
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
370
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
371
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
372
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
373
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
374
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
375
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
376
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
377
- "module.vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00001-of-00002.safetensors",
378
- "module.vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00001-of-00002.safetensors",
379
- "module.vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00001-of-00002.safetensors",
380
- "module.vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00001-of-00002.safetensors",
381
- "module.vision_tower.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00001-of-00002.safetensors",
382
- "module.vision_tower.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00001-of-00002.safetensors",
383
- "module.vision_tower.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00001-of-00002.safetensors",
384
- "module.vision_tower.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00001-of-00002.safetensors",
385
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
386
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
387
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
388
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
389
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
390
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
391
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
392
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
393
- "module.vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00001-of-00002.safetensors",
394
- "module.vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00001-of-00002.safetensors",
395
- "module.vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00001-of-00002.safetensors",
396
- "module.vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "model-00001-of-00002.safetensors",
397
- "module.vision_tower.vision_model.encoder.layers.20.mlp.fc1.bias": "model-00001-of-00002.safetensors",
398
- "module.vision_tower.vision_model.encoder.layers.20.mlp.fc1.weight": "model-00001-of-00002.safetensors",
399
- "module.vision_tower.vision_model.encoder.layers.20.mlp.fc2.bias": "model-00001-of-00002.safetensors",
400
- "module.vision_tower.vision_model.encoder.layers.20.mlp.fc2.weight": "model-00001-of-00002.safetensors",
401
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
402
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
403
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
404
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
405
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
406
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
407
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
408
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
409
- "module.vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": "model-00001-of-00002.safetensors",
410
- "module.vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00001-of-00002.safetensors",
411
- "module.vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00001-of-00002.safetensors",
412
- "module.vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00001-of-00002.safetensors",
413
- "module.vision_tower.vision_model.encoder.layers.21.mlp.fc1.bias": "model-00001-of-00002.safetensors",
414
- "module.vision_tower.vision_model.encoder.layers.21.mlp.fc1.weight": "model-00001-of-00002.safetensors",
415
- "module.vision_tower.vision_model.encoder.layers.21.mlp.fc2.bias": "model-00001-of-00002.safetensors",
416
- "module.vision_tower.vision_model.encoder.layers.21.mlp.fc2.weight": "model-00001-of-00002.safetensors",
417
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
418
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
419
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
420
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
421
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
422
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
423
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
424
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
425
- "module.vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00001-of-00002.safetensors",
426
- "module.vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00001-of-00002.safetensors",
427
- "module.vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00001-of-00002.safetensors",
428
- "module.vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00001-of-00002.safetensors",
429
- "module.vision_tower.vision_model.encoder.layers.22.mlp.fc1.bias": "model-00001-of-00002.safetensors",
430
- "module.vision_tower.vision_model.encoder.layers.22.mlp.fc1.weight": "model-00001-of-00002.safetensors",
431
- "module.vision_tower.vision_model.encoder.layers.22.mlp.fc2.bias": "model-00001-of-00002.safetensors",
432
- "module.vision_tower.vision_model.encoder.layers.22.mlp.fc2.weight": "model-00001-of-00002.safetensors",
433
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
434
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
435
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
436
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
437
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
438
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
439
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
440
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
441
- "module.vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00001-of-00002.safetensors",
442
- "module.vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00001-of-00002.safetensors",
443
- "module.vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00001-of-00002.safetensors",
444
- "module.vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00001-of-00002.safetensors",
445
- "module.vision_tower.vision_model.encoder.layers.23.mlp.fc1.bias": "model-00001-of-00002.safetensors",
446
- "module.vision_tower.vision_model.encoder.layers.23.mlp.fc1.weight": "model-00001-of-00002.safetensors",
447
- "module.vision_tower.vision_model.encoder.layers.23.mlp.fc2.bias": "model-00001-of-00002.safetensors",
448
- "module.vision_tower.vision_model.encoder.layers.23.mlp.fc2.weight": "model-00001-of-00002.safetensors",
449
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
450
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
451
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
452
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
453
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
454
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
455
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
456
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
457
- "module.vision_tower.vision_model.encoder.layers.24.layer_norm1.bias": "model-00001-of-00002.safetensors",
458
- "module.vision_tower.vision_model.encoder.layers.24.layer_norm1.weight": "model-00001-of-00002.safetensors",
459
- "module.vision_tower.vision_model.encoder.layers.24.layer_norm2.bias": "model-00001-of-00002.safetensors",
460
- "module.vision_tower.vision_model.encoder.layers.24.layer_norm2.weight": "model-00001-of-00002.safetensors",
461
- "module.vision_tower.vision_model.encoder.layers.24.mlp.fc1.bias": "model-00001-of-00002.safetensors",
462
- "module.vision_tower.vision_model.encoder.layers.24.mlp.fc1.weight": "model-00001-of-00002.safetensors",
463
- "module.vision_tower.vision_model.encoder.layers.24.mlp.fc2.bias": "model-00001-of-00002.safetensors",
464
- "module.vision_tower.vision_model.encoder.layers.24.mlp.fc2.weight": "model-00001-of-00002.safetensors",
465
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
466
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
467
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
468
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
469
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
470
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
471
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
472
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
473
- "module.vision_tower.vision_model.encoder.layers.25.layer_norm1.bias": "model-00001-of-00002.safetensors",
474
- "module.vision_tower.vision_model.encoder.layers.25.layer_norm1.weight": "model-00001-of-00002.safetensors",
475
- "module.vision_tower.vision_model.encoder.layers.25.layer_norm2.bias": "model-00001-of-00002.safetensors",
476
- "module.vision_tower.vision_model.encoder.layers.25.layer_norm2.weight": "model-00001-of-00002.safetensors",
477
- "module.vision_tower.vision_model.encoder.layers.25.mlp.fc1.bias": "model-00001-of-00002.safetensors",
478
- "module.vision_tower.vision_model.encoder.layers.25.mlp.fc1.weight": "model-00001-of-00002.safetensors",
479
- "module.vision_tower.vision_model.encoder.layers.25.mlp.fc2.bias": "model-00001-of-00002.safetensors",
480
- "module.vision_tower.vision_model.encoder.layers.25.mlp.fc2.weight": "model-00001-of-00002.safetensors",
481
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
482
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
483
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
484
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
485
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
486
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
487
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
488
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
489
- "module.vision_tower.vision_model.encoder.layers.26.layer_norm1.bias": "model-00001-of-00002.safetensors",
490
- "module.vision_tower.vision_model.encoder.layers.26.layer_norm1.weight": "model-00001-of-00002.safetensors",
491
- "module.vision_tower.vision_model.encoder.layers.26.layer_norm2.bias": "model-00001-of-00002.safetensors",
492
- "module.vision_tower.vision_model.encoder.layers.26.layer_norm2.weight": "model-00001-of-00002.safetensors",
493
- "module.vision_tower.vision_model.encoder.layers.26.mlp.fc1.bias": "model-00001-of-00002.safetensors",
494
- "module.vision_tower.vision_model.encoder.layers.26.mlp.fc1.weight": "model-00001-of-00002.safetensors",
495
- "module.vision_tower.vision_model.encoder.layers.26.mlp.fc2.bias": "model-00001-of-00002.safetensors",
496
- "module.vision_tower.vision_model.encoder.layers.26.mlp.fc2.weight": "model-00001-of-00002.safetensors",
497
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
498
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
499
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
500
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
501
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
502
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
503
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
504
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
505
- "module.vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00001-of-00002.safetensors",
506
- "module.vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00001-of-00002.safetensors",
507
- "module.vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00001-of-00002.safetensors",
508
- "module.vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00001-of-00002.safetensors",
509
- "module.vision_tower.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00001-of-00002.safetensors",
510
- "module.vision_tower.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00001-of-00002.safetensors",
511
- "module.vision_tower.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00001-of-00002.safetensors",
512
- "module.vision_tower.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00001-of-00002.safetensors",
513
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
514
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
515
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
516
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
517
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
518
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
519
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
520
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
521
- "module.vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00001-of-00002.safetensors",
522
- "module.vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00001-of-00002.safetensors",
523
- "module.vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00001-of-00002.safetensors",
524
- "module.vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "model-00001-of-00002.safetensors",
525
- "module.vision_tower.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00001-of-00002.safetensors",
526
- "module.vision_tower.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00001-of-00002.safetensors",
527
- "module.vision_tower.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00001-of-00002.safetensors",
528
- "module.vision_tower.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00001-of-00002.safetensors",
529
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
530
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
531
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
532
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
533
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
534
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
535
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
536
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
537
- "module.vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00001-of-00002.safetensors",
538
- "module.vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00001-of-00002.safetensors",
539
- "module.vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00001-of-00002.safetensors",
540
- "module.vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00001-of-00002.safetensors",
541
- "module.vision_tower.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00001-of-00002.safetensors",
542
- "module.vision_tower.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00001-of-00002.safetensors",
543
- "module.vision_tower.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00001-of-00002.safetensors",
544
- "module.vision_tower.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00001-of-00002.safetensors",
545
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
546
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
547
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
548
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
549
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
550
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
551
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
552
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
553
- "module.vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00001-of-00002.safetensors",
554
- "module.vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00001-of-00002.safetensors",
555
- "module.vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00001-of-00002.safetensors",
556
- "module.vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00001-of-00002.safetensors",
557
- "module.vision_tower.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00001-of-00002.safetensors",
558
- "module.vision_tower.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00001-of-00002.safetensors",
559
- "module.vision_tower.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00001-of-00002.safetensors",
560
- "module.vision_tower.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00001-of-00002.safetensors",
561
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
562
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
563
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
564
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
565
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
566
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
567
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
568
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
569
- "module.vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00001-of-00002.safetensors",
570
- "module.vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00001-of-00002.safetensors",
571
- "module.vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00001-of-00002.safetensors",
572
- "module.vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00001-of-00002.safetensors",
573
- "module.vision_tower.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00001-of-00002.safetensors",
574
- "module.vision_tower.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00001-of-00002.safetensors",
575
- "module.vision_tower.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00001-of-00002.safetensors",
576
- "module.vision_tower.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00001-of-00002.safetensors",
577
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
578
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
579
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
580
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
581
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
582
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
583
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
584
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
585
- "module.vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00001-of-00002.safetensors",
586
- "module.vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00001-of-00002.safetensors",
587
- "module.vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00001-of-00002.safetensors",
588
- "module.vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00001-of-00002.safetensors",
589
- "module.vision_tower.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00001-of-00002.safetensors",
590
- "module.vision_tower.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00001-of-00002.safetensors",
591
- "module.vision_tower.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00001-of-00002.safetensors",
592
- "module.vision_tower.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00001-of-00002.safetensors",
593
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
594
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
595
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
596
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
597
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
598
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
599
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
600
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
601
- "module.vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00001-of-00002.safetensors",
602
- "module.vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00001-of-00002.safetensors",
603
- "module.vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00001-of-00002.safetensors",
604
- "module.vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00001-of-00002.safetensors",
605
- "module.vision_tower.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00001-of-00002.safetensors",
606
- "module.vision_tower.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00001-of-00002.safetensors",
607
- "module.vision_tower.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00001-of-00002.safetensors",
608
- "module.vision_tower.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00001-of-00002.safetensors",
609
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
610
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
611
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
612
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
613
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
614
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
615
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
616
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
617
- "module.vision_tower.vision_model.post_layernorm.bias": "model-00001-of-00002.safetensors",
618
- "module.vision_tower.vision_model.post_layernorm.weight": "model-00001-of-00002.safetensors"
619
- }
620
- }

pretrain/one_image_layer1_pretrain_3824/rng_state_0.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:3ee195ebde9bf012f945f068f133e7fe22fef5450c496607e3ef11cc2034a186
- size 15984
 
 
 
 
pretrain/one_image_layer1_pretrain_3824/rng_state_1.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:4c5aa7a36c64701b647fb2121298f95ea81b8534c49340aef7115cfe5813f215
- size 15920
 
 
 
 
pretrain/one_image_layer1_pretrain_3824/rng_state_2.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:3ecc7a6507625983a175b39a25b874b0daa93e57e2878b66b44161722717adb9
- size 15920
 
 
 
 
pretrain/one_image_layer1_pretrain_3824/rng_state_3.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:fd85380aaf3a2b229e71ba6779d40797431662e65fd98354898f93110b8c8599
- size 15920
 
 
 
 
pretrain/one_image_layer1_pretrain_3824/rng_state_4.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:17c78776402f62ad362e175b0adb6333c63cc325c13ea9acc996799ee2b72998
- size 15920
 
 
 
 
pretrain/one_image_layer1_pretrain_3824/rng_state_5.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:e38c4a1329b40fbbf59c9bfe31e7f8f4d423b0f795efe741115685c773627812
- size 15920
 
 
 
 
pretrain/one_image_layer1_pretrain_3824/rng_state_6.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:232e5823d58632b2bd4f0a3eaed918a00d1f4a7ad6e6cfd86785c5645c2ea0ab
- size 15920
 
 
 
 
pretrain/one_image_layer1_pretrain_3824/rng_state_7.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:c01740a56a86b482251295ab8c6ab97ad9b353e9d7a86b07de2c27911a8dacb7
- size 15920
 
 
 
 
pretrain/one_image_layer1_pretrain_3824/scheduler.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:5f9fb39460522fb2411b8e76bc9c22c5f74c649cb79a004c3dc7f9ef8bac6436
- size 1000
 
 
 
 
pretrain/one_image_layer1_pretrain_3824/trainer_state.json DELETED
@@ -1,2708 +0,0 @@
1
- {
2
- "best_global_step": null,
3
- "best_metric": null,
4
- "best_model_checkpoint": null,
5
- "epoch": 8.0,
6
- "eval_steps": 500,
7
- "global_step": 3824,
8
- "is_hyper_param_search": false,
9
- "is_local_process_zero": true,
10
- "is_world_process_zero": true,
11
- "log_history": [
12
- {
13
- "epoch": 0.02092050209205021,
14
- "grad_norm": 887.3228759765625,
15
- "learning_rate": 1.125e-06,
16
- "loss": 30.9142,
17
- "step": 10
18
- },
19
- {
20
- "epoch": 0.04184100418410042,
21
- "grad_norm": 310.3841247558594,
22
- "learning_rate": 2.375e-06,
23
- "loss": 27.4934,
24
- "step": 20
25
- },
26
- {
27
- "epoch": 0.06276150627615062,
28
- "grad_norm": 282.7115478515625,
29
- "learning_rate": 3.625e-06,
30
- "loss": 25.2525,
31
- "step": 30
32
- },
33
- {
34
- "epoch": 0.08368200836820083,
35
- "grad_norm": 807.3260498046875,
36
- "learning_rate": 4.875000000000001e-06,
37
- "loss": 21.1005,
38
- "step": 40
39
- },
40
- {
41
- "epoch": 0.10460251046025104,
42
- "grad_norm": 179.2369842529297,
43
- "learning_rate": 6.125e-06,
44
- "loss": 18.1834,
45
- "step": 50
46
- },
47
- {
48
- "epoch": 0.12552301255230125,
49
- "grad_norm": 170.45269775390625,
50
- "learning_rate": 7.375e-06,
51
- "loss": 16.0711,
52
- "step": 60
53
- },
54
- {
55
- "epoch": 0.14644351464435146,
56
- "grad_norm": 106.6611328125,
57
- "learning_rate": 8.625e-06,
58
- "loss": 14.1324,
59
- "step": 70
60
- },
61
- {
62
- "epoch": 0.16736401673640167,
63
- "grad_norm": 355.8836669921875,
64
- "learning_rate": 9.875000000000001e-06,
65
- "loss": 12.6209,
66
- "step": 80
67
- },
68
- {
69
- "epoch": 0.18828451882845187,
70
- "grad_norm": 146.31329345703125,
71
- "learning_rate": 1.1125000000000001e-05,
72
- "loss": 11.8609,
73
- "step": 90
74
- },
75
- {
76
- "epoch": 0.20920502092050208,
77
- "grad_norm": 104.998779296875,
78
- "learning_rate": 1.2375000000000001e-05,
79
- "loss": 11.3592,
80
- "step": 100
81
- },
82
- {
83
- "epoch": 0.2301255230125523,
84
- "grad_norm": 1490.1988525390625,
85
- "learning_rate": 1.3625e-05,
86
- "loss": 11.5975,
87
- "step": 110
88
- },
89
- {
90
- "epoch": 0.2510460251046025,
91
- "grad_norm": 210.4671173095703,
92
- "learning_rate": 1.4875e-05,
93
- "loss": 11.702,
94
- "step": 120
95
- },
96
- {
97
- "epoch": 0.2719665271966527,
98
- "grad_norm": 160.14736938476562,
99
- "learning_rate": 1.6125000000000002e-05,
100
- "loss": 11.3381,
101
- "step": 130
102
- },
103
- {
104
- "epoch": 0.2928870292887029,
105
- "grad_norm": 105.9501953125,
106
- "learning_rate": 1.7375e-05,
107
- "loss": 11.3639,
108
- "step": 140
109
- },
110
- {
111
- "epoch": 0.3138075313807531,
112
- "grad_norm": 548.2748413085938,
113
- "learning_rate": 1.8625000000000002e-05,
114
- "loss": 11.4813,
115
- "step": 150
116
- },
117
- {
118
- "epoch": 0.33472803347280333,
119
- "grad_norm": 1656.474853515625,
120
- "learning_rate": 1.9875000000000002e-05,
121
- "loss": 11.8497,
122
- "step": 160
123
- },
124
- {
125
- "epoch": 0.35564853556485354,
126
- "grad_norm": 860.3178100585938,
127
- "learning_rate": 2.1125000000000002e-05,
128
- "loss": 11.8934,
129
- "step": 170
130
- },
131
- {
132
- "epoch": 0.37656903765690375,
133
- "grad_norm": 238.6591339111328,
134
- "learning_rate": 2.2375000000000002e-05,
135
- "loss": 11.6121,
136
- "step": 180
137
- },
138
- {
139
- "epoch": 0.39748953974895396,
140
- "grad_norm": 182.48626708984375,
141
- "learning_rate": 2.3624999999999998e-05,
142
- "loss": 11.6588,
143
- "step": 190
144
- },
145
- {
146
- "epoch": 0.41841004184100417,
147
- "grad_norm": 208.10049438476562,
148
- "learning_rate": 2.4875e-05,
149
- "loss": 10.6958,
150
- "step": 200
151
- },
152
- {
153
- "epoch": 0.4393305439330544,
154
- "grad_norm": 172.2975311279297,
155
- "learning_rate": 2.6124999999999998e-05,
156
- "loss": 11.1831,
157
- "step": 210
158
- },
159
- {
160
- "epoch": 0.4602510460251046,
161
- "grad_norm": 84.67173767089844,
162
- "learning_rate": 2.7375e-05,
163
- "loss": 9.9578,
164
- "step": 220
165
- },
166
- {
167
- "epoch": 0.4811715481171548,
168
- "grad_norm": 118.71012115478516,
169
- "learning_rate": 2.8625e-05,
170
- "loss": 12.109,
171
- "step": 230
172
- },
173
- {
174
- "epoch": 0.502092050209205,
175
- "grad_norm": 86.66356658935547,
176
- "learning_rate": 2.9875000000000004e-05,
177
- "loss": 10.0945,
178
- "step": 240
179
- },
180
- {
181
- "epoch": 0.5230125523012552,
182
- "grad_norm": 134.87005615234375,
183
- "learning_rate": 3.1125000000000004e-05,
184
- "loss": 10.3667,
185
- "step": 250
186
- },
187
- {
188
- "epoch": 0.5439330543933054,
189
- "grad_norm": 72.37677001953125,
190
- "learning_rate": 3.2375e-05,
191
- "loss": 11.139,
192
- "step": 260
193
- },
194
- {
195
- "epoch": 0.5648535564853556,
196
- "grad_norm": 885.1691284179688,
197
- "learning_rate": 3.3625000000000004e-05,
198
- "loss": 13.4426,
199
- "step": 270
200
- },
201
- {
202
- "epoch": 0.5857740585774058,
203
- "grad_norm": 73.6139907836914,
204
- "learning_rate": 3.4875e-05,
205
- "loss": 10.569,
206
- "step": 280
207
- },
208
- {
209
- "epoch": 0.606694560669456,
210
- "grad_norm": 136.91339111328125,
211
- "learning_rate": 3.6125000000000004e-05,
212
- "loss": 10.4833,
213
- "step": 290
214
- },
215
- {
216
- "epoch": 0.6276150627615062,
217
- "grad_norm": 364.68756103515625,
218
- "learning_rate": 3.737500000000001e-05,
219
- "loss": 9.6223,
220
- "step": 300
221
- },
222
- {
223
- "epoch": 0.6485355648535565,
224
- "grad_norm": 75.64944458007812,
225
- "learning_rate": 3.8625e-05,
226
- "loss": 9.6282,
227
- "step": 310
228
- },
229
- {
230
- "epoch": 0.6694560669456067,
231
- "grad_norm": 52.43100357055664,
232
- "learning_rate": 3.9875e-05,
233
- "loss": 9.1569,
234
- "step": 320
235
- },
236
- {
237
- "epoch": 0.6903765690376569,
238
- "grad_norm": 53.303531646728516,
239
- "learning_rate": 4.1125000000000004e-05,
240
- "loss": 8.8418,
241
- "step": 330
242
- },
243
- {
244
- "epoch": 0.7112970711297071,
245
- "grad_norm": 19.971961975097656,
246
- "learning_rate": 4.237500000000001e-05,
247
- "loss": 8.2994,
248
- "step": 340
249
- },
250
- {
251
- "epoch": 0.7322175732217573,
252
- "grad_norm": 40.7735595703125,
253
- "learning_rate": 4.3625e-05,
254
- "loss": 8.2646,
255
- "step": 350
256
- },
257
- {
258
- "epoch": 0.7531380753138075,
259
- "grad_norm": 20.076519012451172,
260
- "learning_rate": 4.4875e-05,
261
- "loss": 8.1216,
262
- "step": 360
263
- },
264
- {
265
- "epoch": 0.7740585774058577,
266
- "grad_norm": 31.376934051513672,
267
- "learning_rate": 4.6125e-05,
268
- "loss": 8.1275,
269
- "step": 370
270
- },
271
- {
272
- "epoch": 0.7949790794979079,
273
- "grad_norm": 15.157088279724121,
274
- "learning_rate": 4.7375e-05,
275
- "loss": 7.9889,
276
- "step": 380
277
- },
278
- {
279
- "epoch": 0.8158995815899581,
280
- "grad_norm": 14.430716514587402,
281
- "learning_rate": 4.8625e-05,
282
- "loss": 7.814,
283
- "step": 390
284
- },
285
- {
286
- "epoch": 0.8368200836820083,
287
- "grad_norm": 39.173133850097656,
288
- "learning_rate": 4.9875000000000006e-05,
289
- "loss": 7.7884,
290
- "step": 400
291
- },
292
- {
293
- "epoch": 0.8577405857740585,
294
- "grad_norm": 15.446069717407227,
295
- "learning_rate": 4.986857476635514e-05,
296
- "loss": 7.7428,
297
- "step": 410
298
- },
299
- {
300
- "epoch": 0.8786610878661087,
301
- "grad_norm": 20.680313110351562,
302
- "learning_rate": 4.9722546728971967e-05,
303
- "loss": 7.656,
304
- "step": 420
305
- },
306
- {
307
- "epoch": 0.899581589958159,
308
- "grad_norm": 9.957335472106934,
309
- "learning_rate": 4.9576518691588786e-05,
310
- "loss": 7.5898,
311
- "step": 430
312
- },
313
- {
314
- "epoch": 0.9205020920502092,
315
- "grad_norm": 12.279107093811035,
316
- "learning_rate": 4.9430490654205605e-05,
317
- "loss": 7.5838,
318
- "step": 440
319
- },
320
- {
321
- "epoch": 0.9414225941422594,
322
- "grad_norm": 10.93378734588623,
323
- "learning_rate": 4.928446261682243e-05,
324
- "loss": 7.5065,
325
- "step": 450
326
- },
327
- {
328
- "epoch": 0.9623430962343096,
329
- "grad_norm": 16.950965881347656,
330
- "learning_rate": 4.913843457943925e-05,
331
- "loss": 7.4611,
332
- "step": 460
333
- },
334
- {
335
- "epoch": 0.9832635983263598,
336
- "grad_norm": 12.039278984069824,
337
- "learning_rate": 4.899240654205608e-05,
338
- "loss": 7.4121,
339
- "step": 470
340
- },
341
- {
342
- "epoch": 1.00418410041841,
343
- "grad_norm": 9.576797485351562,
344
- "learning_rate": 4.88463785046729e-05,
345
- "loss": 7.3744,
346
- "step": 480
347
- },
348
- {
349
- "epoch": 1.0251046025104602,
350
- "grad_norm": 12.095304489135742,
351
- "learning_rate": 4.870035046728972e-05,
352
- "loss": 7.2232,
353
- "step": 490
354
- },
355
- {
356
- "epoch": 1.0460251046025104,
357
- "grad_norm": 8.302742004394531,
358
- "learning_rate": 4.855432242990655e-05,
359
- "loss": 7.2095,
360
- "step": 500
361
- },
362
- {
363
- "epoch": 1.0669456066945606,
364
- "grad_norm": 8.745416641235352,
365
- "learning_rate": 4.840829439252337e-05,
366
- "loss": 7.2109,
367
- "step": 510
368
- },
369
- {
370
- "epoch": 1.0878661087866108,
371
- "grad_norm": 8.442458152770996,
372
- "learning_rate": 4.826226635514019e-05,
373
- "loss": 7.189,
374
- "step": 520
375
- },
376
- {
377
- "epoch": 1.108786610878661,
378
- "grad_norm": 13.672759056091309,
379
- "learning_rate": 4.811623831775701e-05,
380
- "loss": 7.1717,
381
- "step": 530
382
- },
383
- {
384
- "epoch": 1.1297071129707112,
385
- "grad_norm": 8.616190910339355,
386
- "learning_rate": 4.797021028037383e-05,
387
- "loss": 7.129,
388
- "step": 540
389
- },
390
- {
391
- "epoch": 1.1506276150627615,
392
- "grad_norm": 10.505746841430664,
393
- "learning_rate": 4.782418224299066e-05,
394
- "loss": 7.141,
395
- "step": 550
396
- },
397
- {
398
- "epoch": 1.1715481171548117,
399
- "grad_norm": 8.96762752532959,
400
- "learning_rate": 4.767815420560748e-05,
401
- "loss": 7.0351,
402
- "step": 560
403
- },
404
- {
405
- "epoch": 1.1924686192468619,
406
- "grad_norm": 8.926515579223633,
407
- "learning_rate": 4.75321261682243e-05,
408
- "loss": 7.0872,
409
- "step": 570
410
- },
411
- {
412
- "epoch": 1.213389121338912,
413
- "grad_norm": 8.333108901977539,
414
- "learning_rate": 4.738609813084112e-05,
415
- "loss": 7.0712,
416
- "step": 580
417
- },
418
- {
419
- "epoch": 1.2343096234309623,
420
- "grad_norm": 8.47938060760498,
421
- "learning_rate": 4.724007009345794e-05,
422
- "loss": 7.0503,
423
- "step": 590
424
- },
425
- {
426
- "epoch": 1.2552301255230125,
427
- "grad_norm": 9.584643363952637,
428
- "learning_rate": 4.709404205607477e-05,
429
- "loss": 7.0105,
430
- "step": 600
431
- },
432
- {
433
- "epoch": 1.2761506276150627,
434
- "grad_norm": 8.007026672363281,
435
- "learning_rate": 4.694801401869159e-05,
436
- "loss": 7.0177,
437
- "step": 610
438
- },
439
- {
440
- "epoch": 1.297071129707113,
441
- "grad_norm": 7.1111273765563965,
442
- "learning_rate": 4.6801985981308414e-05,
443
- "loss": 6.9577,
444
- "step": 620
445
- },
446
- {
447
- "epoch": 1.3179916317991631,
448
- "grad_norm": 8.123859405517578,
449
- "learning_rate": 4.665595794392524e-05,
450
- "loss": 6.9265,
451
- "step": 630
452
- },
453
- {
454
- "epoch": 1.3389121338912133,
455
- "grad_norm": 8.18316650390625,
456
- "learning_rate": 4.650992990654206e-05,
457
- "loss": 6.9556,
458
- "step": 640
459
- },
460
- {
461
- "epoch": 1.3598326359832635,
462
- "grad_norm": 7.255918025970459,
463
- "learning_rate": 4.6363901869158886e-05,
464
- "loss": 6.9137,
465
- "step": 650
466
- },
467
- {
468
- "epoch": 1.3807531380753137,
469
- "grad_norm": 8.160347938537598,
470
- "learning_rate": 4.6217873831775705e-05,
471
- "loss": 6.9551,
472
- "step": 660
473
- },
474
- {
475
- "epoch": 1.401673640167364,
476
- "grad_norm": 7.006373405456543,
477
- "learning_rate": 4.6071845794392524e-05,
478
- "loss": 6.9114,
479
- "step": 670
480
- },
481
- {
482
- "epoch": 1.4225941422594142,
483
- "grad_norm": 8.064070701599121,
484
- "learning_rate": 4.592581775700935e-05,
485
- "loss": 6.9034,
486
- "step": 680
487
- },
488
- {
489
- "epoch": 1.4435146443514644,
490
- "grad_norm": 8.872171401977539,
491
- "learning_rate": 4.577978971962617e-05,
492
- "loss": 6.9638,
493
- "step": 690
494
- },
495
- {
496
- "epoch": 1.4644351464435146,
497
- "grad_norm": 7.363138198852539,
498
- "learning_rate": 4.5633761682242996e-05,
499
- "loss": 6.8616,
500
- "step": 700
501
- },
502
- {
503
- "epoch": 1.4853556485355648,
504
- "grad_norm": 7.530289649963379,
505
- "learning_rate": 4.5487733644859815e-05,
506
- "loss": 6.8221,
507
- "step": 710
508
- },
509
- {
510
- "epoch": 1.506276150627615,
511
- "grad_norm": 6.406079292297363,
512
- "learning_rate": 4.5341705607476634e-05,
513
- "loss": 6.9144,
514
- "step": 720
515
- },
516
- {
517
- "epoch": 1.5271966527196654,
518
- "grad_norm": 7.089895248413086,
519
- "learning_rate": 4.519567757009346e-05,
520
- "loss": 6.8353,
521
- "step": 730
522
- },
523
- {
524
- "epoch": 1.5481171548117154,
525
- "grad_norm": 7.179967880249023,
526
- "learning_rate": 4.504964953271028e-05,
527
- "loss": 6.8086,
528
- "step": 740
529
- },
530
- {
531
- "epoch": 1.5690376569037658,
532
- "grad_norm": 7.228494644165039,
533
- "learning_rate": 4.4903621495327106e-05,
534
- "loss": 6.8486,
535
- "step": 750
536
- },
537
- {
538
- "epoch": 1.5899581589958158,
539
- "grad_norm": 7.923924922943115,
540
- "learning_rate": 4.4757593457943925e-05,
541
- "loss": 6.852,
542
- "step": 760
543
- },
544
- {
545
- "epoch": 1.6108786610878663,
546
- "grad_norm": 6.542073726654053,
547
- "learning_rate": 4.461156542056075e-05,
548
- "loss": 6.7772,
549
- "step": 770
550
- },
551
- {
552
- "epoch": 1.6317991631799162,
553
- "grad_norm": 7.912067413330078,
554
- "learning_rate": 4.446553738317758e-05,
555
- "loss": 6.7579,
556
- "step": 780
557
- },
558
- {
559
- "epoch": 1.6527196652719667,
560
- "grad_norm": 7.274072647094727,
561
- "learning_rate": 4.43195093457944e-05,
562
- "loss": 6.792,
563
- "step": 790
564
- },
565
- {
566
- "epoch": 1.6736401673640167,
567
- "grad_norm": 6.865484714508057,
568
- "learning_rate": 4.4173481308411216e-05,
569
- "loss": 6.7252,
570
- "step": 800
571
- },
572
- {
573
- "epoch": 1.694560669456067,
574
- "grad_norm": 7.647111415863037,
575
- "learning_rate": 4.402745327102804e-05,
576
- "loss": 6.7781,
577
- "step": 810
578
- },
579
- {
580
- "epoch": 1.715481171548117,
581
- "grad_norm": 7.3276472091674805,
582
- "learning_rate": 4.388142523364486e-05,
583
- "loss": 6.7377,
584
- "step": 820
585
- },
586
- {
587
- "epoch": 1.7364016736401675,
588
- "grad_norm": 6.622502326965332,
589
- "learning_rate": 4.373539719626169e-05,
590
- "loss": 6.7379,
591
- "step": 830
592
- },
593
- {
594
- "epoch": 1.7573221757322175,
595
- "grad_norm": 7.060279369354248,
596
- "learning_rate": 4.358936915887851e-05,
597
- "loss": 6.6772,
598
- "step": 840
599
- },
600
- {
601
- "epoch": 1.778242677824268,
602
- "grad_norm": 6.371649265289307,
603
- "learning_rate": 4.3443341121495326e-05,
604
- "loss": 6.737,
605
- "step": 850
606
- },
607
- {
608
- "epoch": 1.799163179916318,
609
- "grad_norm": 6.9483208656311035,
610
- "learning_rate": 4.329731308411215e-05,
611
- "loss": 6.6993,
612
- "step": 860
613
- },
614
- {
615
- "epoch": 1.8200836820083683,
616
- "grad_norm": 7.357904434204102,
617
- "learning_rate": 4.315128504672897e-05,
618
- "loss": 6.5978,
619
- "step": 870
620
- },
621
- {
622
- "epoch": 1.8410041841004183,
623
- "grad_norm": 6.23211145401001,
624
- "learning_rate": 4.30052570093458e-05,
625
- "loss": 6.672,
626
- "step": 880
627
- },
628
- {
629
- "epoch": 1.8619246861924688,
630
- "grad_norm": 6.585605144500732,
631
- "learning_rate": 4.285922897196262e-05,
632
- "loss": 6.6937,
633
- "step": 890
634
- },
635
- {
636
- "epoch": 1.8828451882845187,
637
- "grad_norm": 6.763492584228516,
638
- "learning_rate": 4.2713200934579436e-05,
639
- "loss": 6.6514,
640
- "step": 900
641
- },
642
- {
643
- "epoch": 1.9037656903765692,
644
- "grad_norm": 7.7488932609558105,
645
- "learning_rate": 4.256717289719626e-05,
646
- "loss": 6.6361,
647
- "step": 910
648
- },
649
- {
650
- "epoch": 1.9246861924686192,
651
- "grad_norm": 7.148327350616455,
652
- "learning_rate": 4.242114485981308e-05,
653
- "loss": 6.6667,
654
- "step": 920
655
- },
656
- {
657
- "epoch": 1.9456066945606696,
658
- "grad_norm": 6.471075534820557,
659
- "learning_rate": 4.227511682242991e-05,
660
- "loss": 6.6593,
661
- "step": 930
662
- },
663
- {
664
- "epoch": 1.9665271966527196,
665
- "grad_norm": 6.7469048500061035,
666
- "learning_rate": 4.2129088785046734e-05,
667
- "loss": 6.605,
668
- "step": 940
669
- },
670
- {
671
- "epoch": 1.98744769874477,
672
- "grad_norm": 8.148894309997559,
673
- "learning_rate": 4.198306074766355e-05,
674
- "loss": 6.6451,
675
- "step": 950
676
- },
677
- {
678
- "epoch": 2.00836820083682,
679
- "grad_norm": 7.104391574859619,
680
- "learning_rate": 4.183703271028038e-05,
681
- "loss": 6.4688,
682
- "step": 960
683
- },
684
- {
685
- "epoch": 2.0292887029288704,
686
- "grad_norm": 7.012016773223877,
687
- "learning_rate": 4.16910046728972e-05,
688
- "loss": 6.25,
689
- "step": 970
690
- },
691
- {
692
- "epoch": 2.0502092050209204,
693
- "grad_norm": 7.2709641456604,
694
- "learning_rate": 4.154497663551402e-05,
695
- "loss": 6.2587,
696
- "step": 980
697
- },
698
- {
699
- "epoch": 2.071129707112971,
700
- "grad_norm": 7.433716297149658,
701
- "learning_rate": 4.1398948598130844e-05,
702
- "loss": 6.2431,
703
- "step": 990
704
- },
705
- {
706
- "epoch": 2.092050209205021,
707
- "grad_norm": 6.091396808624268,
708
- "learning_rate": 4.1252920560747664e-05,
709
- "loss": 6.1624,
710
- "step": 1000
711
- },
712
- {
713
- "epoch": 2.1129707112970713,
714
- "grad_norm": 5.893011093139648,
715
- "learning_rate": 4.110689252336449e-05,
716
- "loss": 6.2354,
717
- "step": 1010
718
- },
719
- {
720
- "epoch": 2.1338912133891212,
721
- "grad_norm": 7.6410322189331055,
722
- "learning_rate": 4.096086448598131e-05,
723
- "loss": 6.2472,
724
- "step": 1020
725
- },
726
- {
727
- "epoch": 2.1548117154811717,
728
- "grad_norm": 6.117848873138428,
729
- "learning_rate": 4.081483644859813e-05,
730
- "loss": 6.2394,
731
- "step": 1030
732
- },
733
- {
734
- "epoch": 2.1757322175732217,
735
- "grad_norm": 6.71469783782959,
736
- "learning_rate": 4.0668808411214954e-05,
737
- "loss": 6.1807,
738
- "step": 1040
739
- },
740
- {
741
- "epoch": 2.196652719665272,
742
- "grad_norm": 6.995176792144775,
743
- "learning_rate": 4.0522780373831774e-05,
744
- "loss": 6.2217,
745
- "step": 1050
746
- },
747
- {
748
- "epoch": 2.217573221757322,
749
- "grad_norm": 6.745790481567383,
750
- "learning_rate": 4.03767523364486e-05,
751
- "loss": 6.2129,
752
- "step": 1060
753
- },
754
- {
755
- "epoch": 2.2384937238493725,
756
- "grad_norm": 6.657952308654785,
757
- "learning_rate": 4.023072429906542e-05,
758
- "loss": 6.2246,
759
- "step": 1070
760
- },
761
- {
762
- "epoch": 2.2594142259414225,
763
- "grad_norm": 6.063588619232178,
764
- "learning_rate": 4.0084696261682245e-05,
765
- "loss": 6.2326,
766
- "step": 1080
767
- },
768
- {
769
- "epoch": 2.280334728033473,
770
- "grad_norm": 6.952447414398193,
771
- "learning_rate": 3.993866822429907e-05,
772
- "loss": 6.1872,
773
- "step": 1090
774
- },
775
- {
776
- "epoch": 2.301255230125523,
777
- "grad_norm": 5.979004859924316,
778
- "learning_rate": 3.979264018691589e-05,
779
- "loss": 6.2449,
780
- "step": 1100
781
- },
782
- {
783
- "epoch": 2.3221757322175733,
784
- "grad_norm": 6.522477626800537,
785
- "learning_rate": 3.964661214953272e-05,
786
- "loss": 6.2348,
787
- "step": 1110
788
- },
789
- {
790
- "epoch": 2.3430962343096233,
791
- "grad_norm": 6.758068084716797,
792
- "learning_rate": 3.9500584112149536e-05,
793
- "loss": 6.1823,
794
- "step": 1120
795
- },
796
- {
797
- "epoch": 2.3640167364016738,
798
- "grad_norm": 6.327610015869141,
799
- "learning_rate": 3.9354556074766355e-05,
800
- "loss": 6.2532,
801
- "step": 1130
802
- },
803
- {
804
- "epoch": 2.3849372384937237,
805
- "grad_norm": 6.386441707611084,
806
- "learning_rate": 3.920852803738318e-05,
807
- "loss": 6.2021,
808
- "step": 1140
809
- },
810
- {
811
- "epoch": 2.405857740585774,
812
- "grad_norm": 6.97573709487915,
813
- "learning_rate": 3.90625e-05,
814
- "loss": 6.1731,
815
- "step": 1150
816
- },
817
- {
818
- "epoch": 2.426778242677824,
819
- "grad_norm": 6.362220287322998,
820
- "learning_rate": 3.891647196261683e-05,
821
- "loss": 6.1964,
822
- "step": 1160
823
- },
824
- {
825
- "epoch": 2.4476987447698746,
826
- "grad_norm": 6.3254570960998535,
827
- "learning_rate": 3.8770443925233646e-05,
828
- "loss": 6.1608,
829
- "step": 1170
830
- },
831
- {
832
- "epoch": 2.4686192468619246,
833
- "grad_norm": 6.198873996734619,
834
- "learning_rate": 3.8624415887850466e-05,
835
- "loss": 6.2224,
836
- "step": 1180
837
- },
838
- {
839
- "epoch": 2.489539748953975,
840
- "grad_norm": 6.077391147613525,
841
- "learning_rate": 3.847838785046729e-05,
842
- "loss": 6.1763,
843
- "step": 1190
844
- },
845
- {
846
- "epoch": 2.510460251046025,
847
- "grad_norm": 5.855481147766113,
848
- "learning_rate": 3.833235981308411e-05,
849
- "loss": 6.2059,
850
- "step": 1200
851
- },
852
- {
853
- "epoch": 2.5313807531380754,
854
- "grad_norm": 6.103949546813965,
855
- "learning_rate": 3.818633177570094e-05,
856
- "loss": 6.1737,
857
- "step": 1210
858
- },
859
- {
860
- "epoch": 2.5523012552301254,
861
- "grad_norm": 6.415419101715088,
862
- "learning_rate": 3.8040303738317756e-05,
863
- "loss": 6.1564,
864
- "step": 1220
865
- },
866
- {
867
- "epoch": 2.573221757322176,
868
- "grad_norm": 7.077912330627441,
869
- "learning_rate": 3.789427570093458e-05,
870
- "loss": 6.1761,
871
- "step": 1230
872
- },
873
- {
874
- "epoch": 2.594142259414226,
875
- "grad_norm": 6.930490493774414,
876
- "learning_rate": 3.774824766355141e-05,
877
- "loss": 6.1953,
878
- "step": 1240
879
- },
880
- {
881
- "epoch": 2.6150627615062763,
882
- "grad_norm": 6.841230869293213,
883
- "learning_rate": 3.760221962616823e-05,
884
- "loss": 6.1756,
885
- "step": 1250
886
- },
887
- {
888
- "epoch": 2.6359832635983262,
889
- "grad_norm": 7.52451229095459,
890
- "learning_rate": 3.745619158878505e-05,
891
- "loss": 6.1319,
892
- "step": 1260
893
- },
894
- {
895
- "epoch": 2.6569037656903767,
896
- "grad_norm": 6.8930983543396,
897
- "learning_rate": 3.731016355140187e-05,
898
- "loss": 6.1371,
899
- "step": 1270
900
- },
901
- {
902
- "epoch": 2.6778242677824267,
903
- "grad_norm": 6.321527481079102,
904
- "learning_rate": 3.716413551401869e-05,
905
- "loss": 6.1513,
906
- "step": 1280
907
- },
908
- {
909
- "epoch": 2.698744769874477,
910
- "grad_norm": 5.808718681335449,
911
- "learning_rate": 3.701810747663552e-05,
912
- "loss": 6.1616,
913
- "step": 1290
914
- },
915
- {
916
- "epoch": 2.719665271966527,
917
- "grad_norm": 5.840999126434326,
918
- "learning_rate": 3.687207943925234e-05,
919
- "loss": 6.1374,
920
- "step": 1300
921
- },
922
- {
923
- "epoch": 2.7405857740585775,
924
- "grad_norm": 6.135018825531006,
925
- "learning_rate": 3.672605140186916e-05,
926
- "loss": 6.1664,
927
- "step": 1310
928
- },
929
- {
930
- "epoch": 2.7615062761506275,
931
- "grad_norm": 5.817986011505127,
932
- "learning_rate": 3.6580023364485984e-05,
933
- "loss": 6.1526,
934
- "step": 1320
935
- },
936
- {
937
- "epoch": 2.782426778242678,
938
- "grad_norm": 5.912154197692871,
939
- "learning_rate": 3.64339953271028e-05,
940
- "loss": 6.1267,
941
- "step": 1330
942
- },
943
- {
944
- "epoch": 2.803347280334728,
945
- "grad_norm": 6.677772521972656,
946
- "learning_rate": 3.628796728971963e-05,
947
- "loss": 6.1594,
948
- "step": 1340
949
- },
950
- {
951
- "epoch": 2.8242677824267783,
952
- "grad_norm": 5.715672969818115,
953
- "learning_rate": 3.614193925233645e-05,
954
- "loss": 6.1061,
955
- "step": 1350
956
- },
957
- {
958
- "epoch": 2.8451882845188283,
959
- "grad_norm": 5.800394535064697,
960
- "learning_rate": 3.599591121495327e-05,
961
- "loss": 6.1243,
962
- "step": 1360
963
- },
964
- {
965
- "epoch": 2.8661087866108788,
966
- "grad_norm": 6.575775623321533,
967
- "learning_rate": 3.5849883177570094e-05,
968
- "loss": 6.1525,
969
- "step": 1370
970
- },
971
- {
972
- "epoch": 2.8870292887029287,
973
- "grad_norm": 6.729445457458496,
974
- "learning_rate": 3.570385514018692e-05,
975
- "loss": 6.1545,
976
- "step": 1380
977
- },
978
- {
979
- "epoch": 2.907949790794979,
980
- "grad_norm": 5.717982769012451,
981
- "learning_rate": 3.555782710280374e-05,
982
- "loss": 6.0682,
983
- "step": 1390
984
- },
985
- {
986
- "epoch": 2.928870292887029,
987
- "grad_norm": 6.721540927886963,
988
- "learning_rate": 3.5411799065420565e-05,
989
- "loss": 6.0828,
990
- "step": 1400
991
- },
992
- {
993
- "epoch": 2.9497907949790796,
994
- "grad_norm": 4.733557224273682,
995
- "learning_rate": 3.5265771028037385e-05,
996
- "loss": 6.0791,
997
- "step": 1410
998
- },
999
- {
1000
- "epoch": 2.9707112970711296,
1001
- "grad_norm": 5.717038631439209,
1002
- "learning_rate": 3.511974299065421e-05,
1003
- "loss": 6.1162,
1004
- "step": 1420
1005
- },
1006
- {
1007
- "epoch": 2.99163179916318,
1008
- "grad_norm": 6.3350749015808105,
1009
- "learning_rate": 3.497371495327103e-05,
1010
- "loss": 6.1613,
1011
- "step": 1430
1012
- },
1013
- {
1014
- "epoch": 3.01255230125523,
1015
- "grad_norm": 5.267285346984863,
1016
- "learning_rate": 3.4827686915887856e-05,
1017
- "loss": 5.9522,
1018
- "step": 1440
1019
- },
1020
- {
1021
- "epoch": 3.0334728033472804,
1022
- "grad_norm": 5.272197246551514,
1023
- "learning_rate": 3.4681658878504675e-05,
1024
- "loss": 5.8387,
1025
- "step": 1450
1026
- },
1027
- {
1028
- "epoch": 3.0543933054393304,
1029
- "grad_norm": 5.706630706787109,
1030
- "learning_rate": 3.4535630841121495e-05,
1031
- "loss": 5.7954,
1032
- "step": 1460
1033
- },
1034
- {
1035
- "epoch": 3.075313807531381,
1036
- "grad_norm": 4.603658676147461,
1037
- "learning_rate": 3.438960280373832e-05,
1038
- "loss": 5.8205,
1039
- "step": 1470
1040
- },
1041
- {
1042
- "epoch": 3.096234309623431,
1043
- "grad_norm": 5.040886402130127,
1044
- "learning_rate": 3.424357476635514e-05,
1045
- "loss": 5.8124,
1046
- "step": 1480
1047
- },
1048
- {
1049
- "epoch": 3.1171548117154813,
1050
- "grad_norm": 5.242745876312256,
1051
- "learning_rate": 3.4097546728971966e-05,
1052
- "loss": 5.7672,
1053
- "step": 1490
1054
- },
1055
- {
1056
- "epoch": 3.1380753138075312,
1057
- "grad_norm": 4.996242046356201,
1058
- "learning_rate": 3.3951518691588786e-05,
1059
- "loss": 5.8621,
1060
- "step": 1500
1061
- },
1062
- {
1063
- "epoch": 3.1589958158995817,
1064
- "grad_norm": 4.772254943847656,
1065
- "learning_rate": 3.3805490654205605e-05,
1066
- "loss": 5.7801,
1067
- "step": 1510
1068
- },
1069
- {
1070
- "epoch": 3.1799163179916317,
1071
- "grad_norm": 4.298367023468018,
1072
- "learning_rate": 3.365946261682243e-05,
1073
- "loss": 5.816,
1074
- "step": 1520
1075
- },
1076
- {
1077
- "epoch": 3.200836820083682,
1078
- "grad_norm": 4.564356327056885,
1079
- "learning_rate": 3.351343457943925e-05,
1080
- "loss": 5.8222,
1081
- "step": 1530
1082
- },
1083
- {
1084
- "epoch": 3.221757322175732,
1085
- "grad_norm": 5.478094577789307,
1086
- "learning_rate": 3.3367406542056076e-05,
1087
- "loss": 5.8517,
1088
- "step": 1540
1089
- },
1090
- {
1091
- "epoch": 3.2426778242677825,
1092
- "grad_norm": 5.010961055755615,
1093
- "learning_rate": 3.32213785046729e-05,
1094
- "loss": 5.7739,
1095
- "step": 1550
1096
- },
1097
- {
1098
- "epoch": 3.2635983263598325,
1099
- "grad_norm": 6.255635738372803,
1100
- "learning_rate": 3.307535046728972e-05,
1101
- "loss": 5.7966,
1102
- "step": 1560
1103
- },
1104
- {
1105
- "epoch": 3.284518828451883,
1106
- "grad_norm": 5.523530006408691,
1107
- "learning_rate": 3.292932242990655e-05,
1108
- "loss": 5.8212,
1109
- "step": 1570
1110
- },
1111
- {
1112
- "epoch": 3.305439330543933,
1113
- "grad_norm": 5.291126251220703,
1114
- "learning_rate": 3.278329439252337e-05,
1115
- "loss": 5.7978,
1116
- "step": 1580
1117
- },
1118
- {
1119
- "epoch": 3.3263598326359833,
1120
- "grad_norm": 5.207418441772461,
1121
- "learning_rate": 3.2637266355140187e-05,
1122
- "loss": 5.7898,
1123
- "step": 1590
1124
- },
1125
- {
1126
- "epoch": 3.3472803347280333,
1127
- "grad_norm": 6.096604347229004,
1128
- "learning_rate": 3.249123831775701e-05,
1129
- "loss": 5.8281,
1130
- "step": 1600
1131
- },
1132
- {
1133
- "epoch": 3.3682008368200838,
1134
- "grad_norm": 5.792671203613281,
1135
- "learning_rate": 3.234521028037383e-05,
1136
- "loss": 5.8178,
1137
- "step": 1610
1138
- },
1139
- {
1140
- "epoch": 3.3891213389121337,
1141
- "grad_norm": 5.46074914932251,
1142
- "learning_rate": 3.219918224299066e-05,
1143
- "loss": 5.795,
1144
- "step": 1620
1145
- },
1146
- {
1147
- "epoch": 3.410041841004184,
1148
- "grad_norm": 5.38659143447876,
1149
- "learning_rate": 3.205315420560748e-05,
1150
- "loss": 5.8274,
1151
- "step": 1630
1152
- },
1153
- {
1154
- "epoch": 3.430962343096234,
1155
- "grad_norm": 4.891820430755615,
1156
- "learning_rate": 3.19071261682243e-05,
1157
- "loss": 5.7913,
1158
- "step": 1640
1159
- },
1160
- {
1161
- "epoch": 3.4518828451882846,
1162
- "grad_norm": 5.1778340339660645,
1163
- "learning_rate": 3.176109813084112e-05,
1164
- "loss": 5.7899,
1165
- "step": 1650
1166
- },
1167
- {
1168
- "epoch": 3.4728033472803346,
1169
- "grad_norm": 5.588263511657715,
1170
- "learning_rate": 3.161507009345794e-05,
1171
- "loss": 5.8256,
1172
- "step": 1660
1173
- },
1174
- {
1175
- "epoch": 3.493723849372385,
1176
- "grad_norm": 4.890290260314941,
1177
- "learning_rate": 3.146904205607477e-05,
1178
- "loss": 5.7687,
1179
- "step": 1670
1180
- },
1181
- {
1182
- "epoch": 3.514644351464435,
1183
- "grad_norm": 5.809788703918457,
1184
- "learning_rate": 3.132301401869159e-05,
1185
- "loss": 5.8031,
1186
- "step": 1680
1187
- },
1188
- {
1189
- "epoch": 3.5355648535564854,
1190
- "grad_norm": 5.841116428375244,
1191
- "learning_rate": 3.1176985981308414e-05,
1192
- "loss": 5.8229,
1193
- "step": 1690
1194
- },
1195
- {
1196
- "epoch": 3.5564853556485354,
1197
- "grad_norm": 5.334028244018555,
1198
- "learning_rate": 3.103095794392524e-05,
1199
- "loss": 5.8391,
1200
- "step": 1700
1201
- },
1202
- {
1203
- "epoch": 3.577405857740586,
1204
- "grad_norm": 5.291631698608398,
1205
- "learning_rate": 3.088492990654206e-05,
1206
- "loss": 5.7926,
1207
- "step": 1710
1208
- },
1209
- {
1210
- "epoch": 3.598326359832636,
1211
- "grad_norm": 5.6616997718811035,
1212
- "learning_rate": 3.073890186915888e-05,
1213
- "loss": 5.7975,
1214
- "step": 1720
1215
- },
1216
- {
1217
- "epoch": 3.6192468619246863,
1218
- "grad_norm": 5.7631964683532715,
1219
- "learning_rate": 3.0592873831775705e-05,
1220
- "loss": 5.8042,
1221
- "step": 1730
1222
- },
1223
- {
1224
- "epoch": 3.6401673640167362,
1225
- "grad_norm": 4.754581451416016,
1226
- "learning_rate": 3.0446845794392527e-05,
1227
- "loss": 5.7831,
1228
- "step": 1740
1229
- },
1230
- {
1231
- "epoch": 3.6610878661087867,
1232
- "grad_norm": 6.7304887771606445,
1233
- "learning_rate": 3.0300817757009347e-05,
1234
- "loss": 5.7739,
1235
- "step": 1750
1236
- },
1237
- {
1238
- "epoch": 3.6820083682008367,
1239
- "grad_norm": 6.9664082527160645,
1240
- "learning_rate": 3.015478971962617e-05,
1241
- "loss": 5.8081,
1242
- "step": 1760
1243
- },
1244
- {
1245
- "epoch": 3.702928870292887,
1246
- "grad_norm": 5.753360748291016,
1247
- "learning_rate": 3.0008761682242992e-05,
1248
- "loss": 5.8073,
1249
- "step": 1770
1250
- },
1251
- {
1252
- "epoch": 3.723849372384937,
1253
- "grad_norm": 4.919591903686523,
1254
- "learning_rate": 2.9862733644859815e-05,
1255
- "loss": 5.7945,
1256
- "step": 1780
1257
- },
1258
- {
1259
- "epoch": 3.7447698744769875,
1260
- "grad_norm": 5.293645858764648,
1261
- "learning_rate": 2.9716705607476637e-05,
1262
- "loss": 5.7561,
1263
- "step": 1790
1264
- },
1265
- {
1266
- "epoch": 3.7656903765690375,
1267
- "grad_norm": 5.434943675994873,
1268
- "learning_rate": 2.9570677570093457e-05,
1269
- "loss": 5.7345,
1270
- "step": 1800
1271
- },
1272
- {
1273
- "epoch": 3.786610878661088,
1274
- "grad_norm": 6.738378524780273,
1275
- "learning_rate": 2.942464953271028e-05,
1276
- "loss": 5.7717,
1277
- "step": 1810
1278
- },
1279
- {
1280
- "epoch": 3.8075313807531384,
1281
- "grad_norm": 4.612103462219238,
1282
- "learning_rate": 2.9278621495327102e-05,
1283
- "loss": 5.8105,
1284
- "step": 1820
1285
- },
1286
- {
1287
- "epoch": 3.8284518828451883,
1288
- "grad_norm": 5.595986843109131,
1289
- "learning_rate": 2.9132593457943925e-05,
1290
- "loss": 5.7609,
1291
- "step": 1830
1292
- },
1293
- {
1294
- "epoch": 3.8493723849372383,
1295
- "grad_norm": 5.030132293701172,
1296
- "learning_rate": 2.898656542056075e-05,
1297
- "loss": 5.7854,
1298
- "step": 1840
1299
- },
1300
- {
1301
- "epoch": 3.8702928870292888,
1302
- "grad_norm": 5.898778915405273,
1303
- "learning_rate": 2.8840537383177574e-05,
1304
- "loss": 5.7913,
1305
- "step": 1850
1306
- },
1307
- {
1308
- "epoch": 3.891213389121339,
1309
- "grad_norm": 5.27553129196167,
1310
- "learning_rate": 2.8694509345794396e-05,
1311
- "loss": 5.767,
1312
- "step": 1860
1313
- },
1314
- {
1315
- "epoch": 3.912133891213389,
1316
- "grad_norm": 5.880008697509766,
1317
- "learning_rate": 2.854848130841122e-05,
1318
- "loss": 5.7837,
1319
- "step": 1870
1320
- },
1321
- {
1322
- "epoch": 3.933054393305439,
1323
- "grad_norm": 5.255915641784668,
1324
- "learning_rate": 2.840245327102804e-05,
1325
- "loss": 5.8109,
1326
- "step": 1880
1327
- },
1328
- {
1329
- "epoch": 3.9539748953974896,
1330
- "grad_norm": 4.918103218078613,
1331
- "learning_rate": 2.825642523364486e-05,
1332
- "loss": 5.7973,
1333
- "step": 1890
1334
- },
1335
- {
1336
- "epoch": 3.97489539748954,
1337
- "grad_norm": 5.274151802062988,
1338
- "learning_rate": 2.8110397196261684e-05,
1339
- "loss": 5.7539,
1340
- "step": 1900
1341
- },
1342
- {
1343
- "epoch": 3.99581589958159,
1344
- "grad_norm": 4.758122444152832,
1345
- "learning_rate": 2.7964369158878507e-05,
1346
- "loss": 5.7861,
1347
- "step": 1910
1348
- },
1349
- {
1350
- "epoch": 4.01673640167364,
1351
- "grad_norm": 4.119150161743164,
1352
- "learning_rate": 2.781834112149533e-05,
1353
- "loss": 5.6516,
1354
- "step": 1920
1355
- },
1356
- {
1357
- "epoch": 4.03765690376569,
1358
- "grad_norm": 5.508482456207275,
1359
- "learning_rate": 2.767231308411215e-05,
1360
- "loss": 5.5955,
1361
- "step": 1930
1362
- },
1363
- {
1364
- "epoch": 4.058577405857741,
1365
- "grad_norm": 4.947022914886475,
1366
- "learning_rate": 2.752628504672897e-05,
1367
- "loss": 5.6185,
1368
- "step": 1940
1369
- },
1370
- {
1371
- "epoch": 4.079497907949791,
1372
- "grad_norm": 4.492798328399658,
1373
- "learning_rate": 2.7380257009345794e-05,
1374
- "loss": 5.5724,
1375
- "step": 1950
1376
- },
1377
- {
1378
- "epoch": 4.100418410041841,
1379
- "grad_norm": 4.8855695724487305,
1380
- "learning_rate": 2.7234228971962617e-05,
1381
- "loss": 5.5846,
1382
- "step": 1960
1383
- },
1384
- {
1385
- "epoch": 4.121338912133891,
1386
- "grad_norm": 4.490469932556152,
1387
- "learning_rate": 2.708820093457944e-05,
1388
- "loss": 5.5799,
1389
- "step": 1970
1390
- },
1391
- {
1392
- "epoch": 4.142259414225942,
1393
- "grad_norm": 4.962728977203369,
1394
- "learning_rate": 2.6942172897196262e-05,
1395
- "loss": 5.5537,
1396
- "step": 1980
1397
- },
1398
- {
1399
- "epoch": 4.163179916317992,
1400
- "grad_norm": 4.632351398468018,
1401
- "learning_rate": 2.679614485981308e-05,
1402
- "loss": 5.5802,
1403
- "step": 1990
1404
- },
1405
- {
1406
- "epoch": 4.184100418410042,
1407
- "grad_norm": 4.545164585113525,
1408
- "learning_rate": 2.665011682242991e-05,
1409
- "loss": 5.5682,
1410
- "step": 2000
1411
- },
1412
- {
1413
- "epoch": 4.205020920502092,
1414
- "grad_norm": 4.569462776184082,
1415
- "learning_rate": 2.6504088785046734e-05,
1416
- "loss": 5.5804,
1417
- "step": 2010
1418
- },
1419
- {
1420
- "epoch": 4.2259414225941425,
1421
- "grad_norm": 4.675363540649414,
1422
- "learning_rate": 2.6358060747663553e-05,
1423
- "loss": 5.5602,
1424
- "step": 2020
1425
- },
1426
- {
1427
- "epoch": 4.2468619246861925,
1428
- "grad_norm": 4.587751865386963,
1429
- "learning_rate": 2.6212032710280376e-05,
1430
- "loss": 5.5693,
1431
- "step": 2030
1432
- },
1433
- {
1434
- "epoch": 4.2677824267782425,
1435
- "grad_norm": 4.584193706512451,
1436
- "learning_rate": 2.60660046728972e-05,
1437
- "loss": 5.5592,
1438
- "step": 2040
1439
- },
1440
- {
1441
- "epoch": 4.2887029288702925,
1442
- "grad_norm": 4.563549995422363,
1443
- "learning_rate": 2.591997663551402e-05,
1444
- "loss": 5.5985,
1445
- "step": 2050
1446
- },
1447
- {
1448
- "epoch": 4.309623430962343,
1449
- "grad_norm": 5.494087219238281,
1450
- "learning_rate": 2.5773948598130844e-05,
1451
- "loss": 5.5519,
1452
- "step": 2060
1453
- },
1454
- {
1455
- "epoch": 4.330543933054393,
1456
- "grad_norm": 4.778312683105469,
1457
- "learning_rate": 2.5627920560747663e-05,
1458
- "loss": 5.528,
1459
- "step": 2070
1460
- },
1461
- {
1462
- "epoch": 4.351464435146443,
1463
- "grad_norm": 4.691561222076416,
1464
- "learning_rate": 2.5481892523364486e-05,
1465
- "loss": 5.608,
1466
- "step": 2080
1467
- },
1468
- {
1469
- "epoch": 4.372384937238493,
1470
- "grad_norm": 5.41495418548584,
1471
- "learning_rate": 2.533586448598131e-05,
1472
- "loss": 5.5799,
1473
- "step": 2090
1474
- },
1475
- {
1476
- "epoch": 4.393305439330544,
1477
- "grad_norm": 4.823550701141357,
1478
- "learning_rate": 2.518983644859813e-05,
1479
- "loss": 5.6088,
1480
- "step": 2100
1481
- },
1482
- {
1483
- "epoch": 4.414225941422594,
1484
- "grad_norm": 4.856595039367676,
1485
- "learning_rate": 2.5043808411214954e-05,
1486
- "loss": 5.553,
1487
- "step": 2110
1488
- },
1489
- {
1490
- "epoch": 4.435146443514644,
1491
- "grad_norm": 5.082206726074219,
1492
- "learning_rate": 2.4897780373831777e-05,
1493
- "loss": 5.5966,
1494
- "step": 2120
1495
- },
1496
- {
1497
- "epoch": 4.456066945606695,
1498
- "grad_norm": 5.180952072143555,
1499
- "learning_rate": 2.47517523364486e-05,
1500
- "loss": 5.5996,
1501
- "step": 2130
1502
- },
1503
- {
1504
- "epoch": 4.476987447698745,
1505
- "grad_norm": 4.806488513946533,
1506
- "learning_rate": 2.4605724299065422e-05,
1507
- "loss": 5.5855,
1508
- "step": 2140
1509
- },
1510
- {
1511
- "epoch": 4.497907949790795,
1512
- "grad_norm": 4.399123191833496,
1513
- "learning_rate": 2.4459696261682245e-05,
1514
- "loss": 5.5542,
1515
- "step": 2150
1516
- },
1517
- {
1518
- "epoch": 4.518828451882845,
1519
- "grad_norm": 5.287519931793213,
1520
- "learning_rate": 2.4313668224299068e-05,
1521
- "loss": 5.5664,
1522
- "step": 2160
1523
- },
1524
- {
1525
- "epoch": 4.539748953974895,
1526
- "grad_norm": 5.102400302886963,
1527
- "learning_rate": 2.4167640186915887e-05,
1528
- "loss": 5.5246,
1529
- "step": 2170
1530
- },
1531
- {
1532
- "epoch": 4.560669456066946,
1533
- "grad_norm": 4.733987331390381,
1534
- "learning_rate": 2.402161214953271e-05,
1535
- "loss": 5.5828,
1536
- "step": 2180
1537
- },
1538
- {
1539
- "epoch": 4.581589958158996,
1540
- "grad_norm": 4.772219657897949,
1541
- "learning_rate": 2.3875584112149536e-05,
1542
- "loss": 5.5427,
1543
- "step": 2190
1544
- },
1545
- {
1546
- "epoch": 4.602510460251046,
1547
- "grad_norm": 5.13002347946167,
1548
- "learning_rate": 2.372955607476636e-05,
1549
- "loss": 5.5812,
1550
- "step": 2200
1551
- },
1552
- {
1553
- "epoch": 4.623430962343097,
1554
- "grad_norm": 4.861030578613281,
1555
- "learning_rate": 2.3583528037383178e-05,
1556
- "loss": 5.5168,
1557
- "step": 2210
1558
- },
1559
- {
1560
- "epoch": 4.644351464435147,
1561
- "grad_norm": 4.335892200469971,
1562
- "learning_rate": 2.34375e-05,
1563
- "loss": 5.5225,
1564
- "step": 2220
1565
- },
1566
- {
1567
- "epoch": 4.665271966527197,
1568
- "grad_norm": 4.631258010864258,
1569
- "learning_rate": 2.3291471962616823e-05,
1570
- "loss": 5.5719,
1571
- "step": 2230
1572
- },
1573
- {
1574
- "epoch": 4.686192468619247,
1575
- "grad_norm": 4.577663898468018,
1576
- "learning_rate": 2.3145443925233646e-05,
1577
- "loss": 5.5244,
1578
- "step": 2240
1579
- },
1580
- {
1581
- "epoch": 4.707112970711297,
1582
- "grad_norm": 5.098531723022461,
1583
- "learning_rate": 2.299941588785047e-05,
1584
- "loss": 5.5396,
1585
- "step": 2250
1586
- },
1587
- {
1588
- "epoch": 4.7280334728033475,
1589
- "grad_norm": 4.742924690246582,
1590
- "learning_rate": 2.285338785046729e-05,
1591
- "loss": 5.5612,
1592
- "step": 2260
1593
- },
1594
- {
1595
- "epoch": 4.7489539748953975,
1596
- "grad_norm": 4.887773036956787,
1597
- "learning_rate": 2.2707359813084114e-05,
1598
- "loss": 5.5807,
1599
- "step": 2270
1600
- },
1601
- {
1602
- "epoch": 4.7698744769874475,
1603
- "grad_norm": 5.311403274536133,
1604
- "learning_rate": 2.2561331775700937e-05,
1605
- "loss": 5.5715,
1606
- "step": 2280
1607
- },
1608
- {
1609
- "epoch": 4.790794979079498,
1610
- "grad_norm": 4.41756534576416,
1611
- "learning_rate": 2.241530373831776e-05,
1612
- "loss": 5.5645,
1613
- "step": 2290
1614
- },
1615
- {
1616
- "epoch": 4.811715481171548,
1617
- "grad_norm": 5.780274868011475,
1618
- "learning_rate": 2.226927570093458e-05,
1619
- "loss": 5.5878,
1620
- "step": 2300
1621
- },
1622
- {
1623
- "epoch": 4.832635983263598,
1624
- "grad_norm": 4.756994247436523,
1625
- "learning_rate": 2.21232476635514e-05,
1626
- "loss": 5.5706,
1627
- "step": 2310
1628
- },
1629
- {
1630
- "epoch": 4.853556485355648,
1631
- "grad_norm": 4.693044185638428,
1632
- "learning_rate": 2.1977219626168224e-05,
1633
- "loss": 5.5582,
1634
- "step": 2320
1635
- },
1636
- {
1637
- "epoch": 4.874476987447698,
1638
- "grad_norm": 4.866166591644287,
1639
- "learning_rate": 2.1831191588785047e-05,
1640
- "loss": 5.5598,
1641
- "step": 2330
1642
- },
1643
- {
1644
- "epoch": 4.895397489539749,
1645
- "grad_norm": 5.382452011108398,
1646
- "learning_rate": 2.1685163551401873e-05,
1647
- "loss": 5.5402,
1648
- "step": 2340
1649
- },
1650
- {
1651
- "epoch": 4.916317991631799,
1652
- "grad_norm": 4.551178455352783,
1653
- "learning_rate": 2.1539135514018692e-05,
1654
- "loss": 5.5733,
1655
- "step": 2350
1656
- },
1657
- {
1658
- "epoch": 4.937238493723849,
1659
- "grad_norm": 5.513873100280762,
1660
- "learning_rate": 2.1393107476635515e-05,
1661
- "loss": 5.5659,
1662
- "step": 2360
1663
- },
1664
- {
1665
- "epoch": 4.9581589958159,
1666
- "grad_norm": 4.788096904754639,
1667
- "learning_rate": 2.1247079439252338e-05,
1668
- "loss": 5.5579,
1669
- "step": 2370
1670
- },
1671
- {
1672
- "epoch": 4.97907949790795,
1673
- "grad_norm": 5.45959997177124,
1674
- "learning_rate": 2.110105140186916e-05,
1675
- "loss": 5.5479,
1676
- "step": 2380
1677
- },
1678
- {
1679
- "epoch": 5.0,
1680
- "grad_norm": 4.460362911224365,
1681
- "learning_rate": 2.0955023364485983e-05,
1682
- "loss": 5.5359,
1683
- "step": 2390
1684
- },
1685
- {
1686
- "epoch": 5.02092050209205,
1687
- "grad_norm": 4.2076592445373535,
1688
- "learning_rate": 2.0808995327102803e-05,
1689
- "loss": 5.393,
1690
- "step": 2400
1691
- },
1692
- {
1693
- "epoch": 5.0418410041841,
1694
- "grad_norm": 5.154625415802002,
1695
- "learning_rate": 2.0662967289719625e-05,
1696
- "loss": 5.4034,
1697
- "step": 2410
1698
- },
1699
- {
1700
- "epoch": 5.062761506276151,
1701
- "grad_norm": 4.607546806335449,
1702
- "learning_rate": 2.051693925233645e-05,
1703
- "loss": 5.4052,
1704
- "step": 2420
1705
- },
1706
- {
1707
- "epoch": 5.083682008368201,
1708
- "grad_norm": 4.8969316482543945,
1709
- "learning_rate": 2.0370911214953274e-05,
1710
- "loss": 5.4045,
1711
- "step": 2430
1712
- },
1713
- {
1714
- "epoch": 5.104602510460251,
1715
- "grad_norm": 4.611139297485352,
1716
- "learning_rate": 2.0224883177570093e-05,
1717
- "loss": 5.4025,
1718
- "step": 2440
1719
- },
1720
- {
1721
- "epoch": 5.125523012552302,
1722
- "grad_norm": 4.330108642578125,
1723
- "learning_rate": 2.0078855140186916e-05,
1724
- "loss": 5.3758,
1725
- "step": 2450
1726
- },
1727
- {
1728
- "epoch": 5.146443514644352,
1729
- "grad_norm": 4.173985004425049,
1730
- "learning_rate": 1.993282710280374e-05,
1731
- "loss": 5.4351,
1732
- "step": 2460
1733
- },
1734
- {
1735
- "epoch": 5.167364016736402,
1736
- "grad_norm": 4.152027606964111,
1737
- "learning_rate": 1.978679906542056e-05,
1738
- "loss": 5.4315,
1739
- "step": 2470
1740
- },
1741
- {
1742
- "epoch": 5.188284518828452,
1743
- "grad_norm": 5.017761707305908,
1744
- "learning_rate": 1.9640771028037384e-05,
1745
- "loss": 5.4015,
1746
- "step": 2480
1747
- },
1748
- {
1749
- "epoch": 5.209205020920502,
1750
- "grad_norm": 4.6566643714904785,
1751
- "learning_rate": 1.9494742990654207e-05,
1752
- "loss": 5.4487,
1753
- "step": 2490
1754
- },
1755
- {
1756
- "epoch": 5.2301255230125525,
1757
- "grad_norm": 4.430664539337158,
1758
- "learning_rate": 1.934871495327103e-05,
1759
- "loss": 5.4582,
1760
- "step": 2500
1761
- },
1762
- {
1763
- "epoch": 5.2510460251046025,
1764
- "grad_norm": 4.494618892669678,
1765
- "learning_rate": 1.9202686915887852e-05,
1766
- "loss": 5.453,
1767
- "step": 2510
1768
- },
1769
- {
1770
- "epoch": 5.2719665271966525,
1771
- "grad_norm": 4.5483832359313965,
1772
- "learning_rate": 1.9056658878504675e-05,
1773
- "loss": 5.4303,
1774
- "step": 2520
1775
- },
1776
- {
1777
- "epoch": 5.292887029288703,
1778
- "grad_norm": 4.876987934112549,
1779
- "learning_rate": 1.8910630841121494e-05,
1780
- "loss": 5.4322,
1781
- "step": 2530
1782
- },
1783
- {
1784
- "epoch": 5.313807531380753,
1785
- "grad_norm": 4.776066303253174,
1786
- "learning_rate": 1.8764602803738317e-05,
1787
- "loss": 5.4105,
1788
- "step": 2540
1789
- },
1790
- {
1791
- "epoch": 5.334728033472803,
1792
- "grad_norm": 4.958277225494385,
1793
- "learning_rate": 1.861857476635514e-05,
1794
- "loss": 5.3658,
1795
- "step": 2550
1796
- },
1797
- {
1798
- "epoch": 5.355648535564853,
1799
- "grad_norm": 4.768832206726074,
1800
- "learning_rate": 1.8472546728971963e-05,
1801
- "loss": 5.417,
1802
- "step": 2560
1803
- },
1804
- {
1805
- "epoch": 5.376569037656903,
1806
- "grad_norm": 4.271088600158691,
1807
- "learning_rate": 1.832651869158879e-05,
1808
- "loss": 5.4015,
1809
- "step": 2570
1810
- },
1811
- {
1812
- "epoch": 5.397489539748954,
1813
- "grad_norm": 5.521544456481934,
1814
- "learning_rate": 1.8180490654205608e-05,
1815
- "loss": 5.3982,
1816
- "step": 2580
1817
- },
1818
- {
1819
- "epoch": 5.418410041841004,
1820
- "grad_norm": 4.810739040374756,
1821
- "learning_rate": 1.803446261682243e-05,
1822
- "loss": 5.4013,
1823
- "step": 2590
1824
- },
1825
- {
1826
- "epoch": 5.439330543933054,
1827
- "grad_norm": 4.353354454040527,
1828
- "learning_rate": 1.7888434579439253e-05,
1829
- "loss": 5.4112,
1830
- "step": 2600
1831
- },
1832
- {
1833
- "epoch": 5.460251046025105,
1834
- "grad_norm": 4.496838569641113,
1835
- "learning_rate": 1.7742406542056076e-05,
1836
- "loss": 5.4245,
1837
- "step": 2610
1838
- },
1839
- {
1840
- "epoch": 5.481171548117155,
1841
- "grad_norm": 4.632349014282227,
1842
- "learning_rate": 1.75963785046729e-05,
1843
- "loss": 5.3941,
1844
- "step": 2620
1845
- },
1846
- {
1847
- "epoch": 5.502092050209205,
1848
- "grad_norm": 4.464936256408691,
1849
- "learning_rate": 1.7450350467289718e-05,
1850
- "loss": 5.4315,
1851
- "step": 2630
1852
- },
1853
- {
1854
- "epoch": 5.523012552301255,
1855
- "grad_norm": 5.022765159606934,
1856
- "learning_rate": 1.730432242990654e-05,
1857
- "loss": 5.4,
1858
- "step": 2640
1859
- },
1860
- {
1861
- "epoch": 5.543933054393305,
1862
- "grad_norm": 4.089568614959717,
1863
- "learning_rate": 1.7158294392523367e-05,
1864
- "loss": 5.4026,
1865
- "step": 2650
1866
- },
1867
- {
1868
- "epoch": 5.564853556485356,
1869
- "grad_norm": 4.395118713378906,
1870
- "learning_rate": 1.701226635514019e-05,
1871
- "loss": 5.3902,
1872
- "step": 2660
1873
- },
1874
- {
1875
- "epoch": 5.585774058577406,
1876
- "grad_norm": 4.480564117431641,
1877
- "learning_rate": 1.686623831775701e-05,
1878
- "loss": 5.4363,
1879
- "step": 2670
1880
- },
1881
- {
1882
- "epoch": 5.606694560669456,
1883
- "grad_norm": 4.185891628265381,
1884
- "learning_rate": 1.672021028037383e-05,
1885
- "loss": 5.4802,
1886
- "step": 2680
1887
- },
1888
- {
1889
- "epoch": 5.627615062761507,
1890
- "grad_norm": 4.2380781173706055,
1891
- "learning_rate": 1.6574182242990654e-05,
1892
- "loss": 5.4399,
1893
- "step": 2690
1894
- },
1895
- {
1896
- "epoch": 5.648535564853557,
1897
- "grad_norm": 4.757248878479004,
1898
- "learning_rate": 1.6428154205607477e-05,
1899
- "loss": 5.3782,
1900
- "step": 2700
1901
- },
1902
- {
1903
- "epoch": 5.669456066945607,
1904
- "grad_norm": 4.615571022033691,
1905
- "learning_rate": 1.62821261682243e-05,
1906
- "loss": 5.4154,
1907
- "step": 2710
1908
- },
1909
- {
1910
- "epoch": 5.690376569037657,
1911
- "grad_norm": 4.125430107116699,
1912
- "learning_rate": 1.6136098130841123e-05,
1913
- "loss": 5.398,
1914
- "step": 2720
1915
- },
1916
- {
1917
- "epoch": 5.711297071129707,
1918
- "grad_norm": 4.771024227142334,
1919
- "learning_rate": 1.5990070093457945e-05,
1920
- "loss": 5.4251,
1921
- "step": 2730
1922
- },
1923
- {
1924
- "epoch": 5.7322175732217575,
1925
- "grad_norm": 4.4113664627075195,
1926
- "learning_rate": 1.5844042056074768e-05,
1927
- "loss": 5.4073,
1928
- "step": 2740
1929
- },
1930
- {
1931
- "epoch": 5.7531380753138075,
1932
- "grad_norm": 4.691117286682129,
1933
- "learning_rate": 1.569801401869159e-05,
1934
- "loss": 5.3625,
1935
- "step": 2750
1936
- },
1937
- {
1938
- "epoch": 5.7740585774058575,
1939
- "grad_norm": 4.399471282958984,
1940
- "learning_rate": 1.5551985981308413e-05,
1941
- "loss": 5.4223,
1942
- "step": 2760
1943
- },
1944
- {
1945
- "epoch": 5.794979079497908,
1946
- "grad_norm": 4.342303276062012,
1947
- "learning_rate": 1.5405957943925233e-05,
1948
- "loss": 5.4336,
1949
- "step": 2770
1950
- },
1951
- {
1952
- "epoch": 5.815899581589958,
1953
- "grad_norm": 4.583115577697754,
1954
- "learning_rate": 1.5259929906542055e-05,
1955
- "loss": 5.4044,
1956
- "step": 2780
1957
- },
1958
- {
1959
- "epoch": 5.836820083682008,
1960
- "grad_norm": 4.574479579925537,
1961
- "learning_rate": 1.5113901869158878e-05,
1962
- "loss": 5.3913,
1963
- "step": 2790
1964
- },
1965
- {
1966
- "epoch": 5.857740585774058,
1967
- "grad_norm": 4.470442295074463,
1968
- "learning_rate": 1.4967873831775703e-05,
1969
- "loss": 5.401,
1970
- "step": 2800
1971
- },
1972
- {
1973
- "epoch": 5.878661087866108,
1974
- "grad_norm": 5.023131847381592,
1975
- "learning_rate": 1.4821845794392525e-05,
1976
- "loss": 5.3922,
1977
- "step": 2810
1978
- },
1979
- {
1980
- "epoch": 5.899581589958159,
1981
- "grad_norm": 4.9364824295043945,
1982
- "learning_rate": 1.4675817757009346e-05,
1983
- "loss": 5.4227,
1984
- "step": 2820
1985
- },
1986
- {
1987
- "epoch": 5.920502092050209,
1988
- "grad_norm": 4.783654689788818,
1989
- "learning_rate": 1.4529789719626169e-05,
1990
- "loss": 5.388,
1991
- "step": 2830
1992
- },
1993
- {
1994
- "epoch": 5.941422594142259,
1995
- "grad_norm": 4.577333450317383,
1996
- "learning_rate": 1.4383761682242992e-05,
1997
- "loss": 5.4557,
1998
- "step": 2840
1999
- },
2000
- {
2001
- "epoch": 5.96234309623431,
2002
- "grad_norm": 5.692765235900879,
2003
- "learning_rate": 1.4237733644859813e-05,
2004
- "loss": 5.4204,
2005
- "step": 2850
2006
- },
2007
- {
2008
- "epoch": 5.98326359832636,
2009
- "grad_norm": 4.250858306884766,
2010
- "learning_rate": 1.4091705607476635e-05,
2011
- "loss": 5.3882,
2012
- "step": 2860
2013
- },
2014
- {
2015
- "epoch": 6.00418410041841,
2016
- "grad_norm": 3.9449563026428223,
2017
- "learning_rate": 1.394567757009346e-05,
2018
- "loss": 5.3856,
2019
- "step": 2870
2020
- },
2021
- {
2022
- "epoch": 6.02510460251046,
2023
- "grad_norm": 4.342050552368164,
2024
- "learning_rate": 1.3799649532710283e-05,
2025
- "loss": 5.3484,
2026
- "step": 2880
2027
- },
2028
- {
2029
- "epoch": 6.046025104602511,
2030
- "grad_norm": 4.476908206939697,
2031
- "learning_rate": 1.3653621495327104e-05,
2032
- "loss": 5.319,
2033
- "step": 2890
2034
- },
2035
- {
2036
- "epoch": 6.066945606694561,
2037
- "grad_norm": 4.24432897567749,
2038
- "learning_rate": 1.3507593457943926e-05,
2039
- "loss": 5.3257,
2040
- "step": 2900
2041
- },
2042
- {
2043
- "epoch": 6.087866108786611,
2044
- "grad_norm": 4.31618595123291,
2045
- "learning_rate": 1.3361565420560749e-05,
2046
- "loss": 5.322,
2047
- "step": 2910
2048
- },
2049
- {
2050
- "epoch": 6.108786610878661,
2051
- "grad_norm": 4.380491733551025,
2052
- "learning_rate": 1.321553738317757e-05,
2053
- "loss": 5.2856,
2054
- "step": 2920
2055
- },
2056
- {
2057
- "epoch": 6.129707112970712,
2058
- "grad_norm": 4.4902567863464355,
2059
- "learning_rate": 1.3069509345794393e-05,
2060
- "loss": 5.3467,
2061
- "step": 2930
2062
- },
2063
- {
2064
- "epoch": 6.150627615062762,
2065
- "grad_norm": 3.720334768295288,
2066
- "learning_rate": 1.2923481308411214e-05,
2067
- "loss": 5.3036,
2068
- "step": 2940
2069
- },
2070
- {
2071
- "epoch": 6.171548117154812,
2072
- "grad_norm": 4.389863967895508,
2073
- "learning_rate": 1.277745327102804e-05,
2074
- "loss": 5.267,
2075
- "step": 2950
2076
- },
2077
- {
2078
- "epoch": 6.192468619246862,
2079
- "grad_norm": 3.921224355697632,
2080
- "learning_rate": 1.263142523364486e-05,
2081
- "loss": 5.3202,
2082
- "step": 2960
2083
- },
2084
- {
2085
- "epoch": 6.2133891213389125,
2086
- "grad_norm": 4.181203842163086,
2087
- "learning_rate": 1.2485397196261684e-05,
2088
- "loss": 5.3051,
2089
- "step": 2970
2090
- },
2091
- {
2092
- "epoch": 6.2343096234309625,
2093
- "grad_norm": 3.9604923725128174,
2094
- "learning_rate": 1.2339369158878506e-05,
2095
- "loss": 5.3289,
2096
- "step": 2980
2097
- },
2098
- {
2099
- "epoch": 6.2552301255230125,
2100
- "grad_norm": 4.156136512756348,
2101
- "learning_rate": 1.2193341121495327e-05,
2102
- "loss": 5.2953,
2103
- "step": 2990
2104
- },
2105
- {
2106
- "epoch": 6.2761506276150625,
2107
- "grad_norm": 4.175608158111572,
2108
- "learning_rate": 1.204731308411215e-05,
2109
- "loss": 5.3203,
2110
- "step": 3000
2111
- },
2112
- {
2113
- "epoch": 6.297071129707113,
2114
- "grad_norm": 4.1036696434021,
2115
- "learning_rate": 1.1901285046728973e-05,
2116
- "loss": 5.3116,
2117
- "step": 3010
2118
- },
2119
- {
2120
- "epoch": 6.317991631799163,
2121
- "grad_norm": 4.54881477355957,
2122
- "learning_rate": 1.1755257009345795e-05,
2123
- "loss": 5.2957,
2124
- "step": 3020
2125
- },
2126
- {
2127
- "epoch": 6.338912133891213,
2128
- "grad_norm": 4.8235955238342285,
2129
- "learning_rate": 1.1609228971962616e-05,
2130
- "loss": 5.3388,
2131
- "step": 3030
2132
- },
2133
- {
2134
- "epoch": 6.359832635983263,
2135
- "grad_norm": 4.378243446350098,
2136
- "learning_rate": 1.1463200934579439e-05,
2137
- "loss": 5.3191,
2138
- "step": 3040
2139
- },
2140
- {
2141
- "epoch": 6.380753138075314,
2142
- "grad_norm": 4.352092742919922,
2143
- "learning_rate": 1.1317172897196262e-05,
2144
- "loss": 5.338,
2145
- "step": 3050
2146
- },
2147
- {
2148
- "epoch": 6.401673640167364,
2149
- "grad_norm": 4.318615436553955,
2150
- "learning_rate": 1.1171144859813085e-05,
2151
- "loss": 5.3555,
2152
- "step": 3060
2153
- },
2154
- {
2155
- "epoch": 6.422594142259414,
2156
- "grad_norm": 4.34386682510376,
2157
- "learning_rate": 1.1025116822429907e-05,
2158
- "loss": 5.3297,
2159
- "step": 3070
2160
- },
2161
- {
2162
- "epoch": 6.443514644351464,
2163
- "grad_norm": 4.626368045806885,
2164
- "learning_rate": 1.087908878504673e-05,
2165
- "loss": 5.3231,
2166
- "step": 3080
2167
- },
2168
- {
2169
- "epoch": 6.464435146443515,
2170
- "grad_norm": 5.051966667175293,
2171
- "learning_rate": 1.0733060747663553e-05,
2172
- "loss": 5.2995,
2173
- "step": 3090
2174
- },
2175
- {
2176
- "epoch": 6.485355648535565,
2177
- "grad_norm": 4.084916591644287,
2178
- "learning_rate": 1.0587032710280374e-05,
2179
- "loss": 5.3096,
2180
- "step": 3100
2181
- },
2182
- {
2183
- "epoch": 6.506276150627615,
2184
- "grad_norm": 4.627449989318848,
2185
- "learning_rate": 1.0441004672897196e-05,
2186
- "loss": 5.3051,
2187
- "step": 3110
2188
- },
2189
- {
2190
- "epoch": 6.527196652719665,
2191
- "grad_norm": 4.023199081420898,
2192
- "learning_rate": 1.0294976635514019e-05,
2193
- "loss": 5.3299,
2194
- "step": 3120
2195
- },
2196
- {
2197
- "epoch": 6.548117154811716,
2198
- "grad_norm": 4.522974491119385,
2199
- "learning_rate": 1.0148948598130842e-05,
2200
- "loss": 5.3096,
2201
- "step": 3130
2202
- },
2203
- {
2204
- "epoch": 6.569037656903766,
2205
- "grad_norm": 4.206942081451416,
2206
- "learning_rate": 1.0002920560747665e-05,
2207
- "loss": 5.3315,
2208
- "step": 3140
2209
- },
2210
- {
2211
- "epoch": 6.589958158995816,
2212
- "grad_norm": 4.5326409339904785,
2213
- "learning_rate": 9.856892523364486e-06,
2214
- "loss": 5.3204,
2215
- "step": 3150
2216
- },
2217
- {
2218
- "epoch": 6.610878661087866,
2219
- "grad_norm": 4.2639923095703125,
2220
- "learning_rate": 9.71086448598131e-06,
2221
- "loss": 5.3027,
2222
- "step": 3160
2223
- },
2224
- {
2225
- "epoch": 6.631799163179917,
2226
- "grad_norm": 4.675206661224365,
2227
- "learning_rate": 9.564836448598131e-06,
2228
- "loss": 5.3199,
2229
- "step": 3170
2230
- },
2231
- {
2232
- "epoch": 6.652719665271967,
2233
- "grad_norm": 4.079299449920654,
2234
- "learning_rate": 9.418808411214954e-06,
2235
- "loss": 5.3314,
2236
- "step": 3180
2237
- },
2238
- {
2239
- "epoch": 6.673640167364017,
2240
- "grad_norm": 5.061886787414551,
2241
- "learning_rate": 9.272780373831776e-06,
2242
- "loss": 5.3272,
2243
- "step": 3190
2244
- },
2245
- {
2246
- "epoch": 6.694560669456067,
2247
- "grad_norm": 4.1819539070129395,
2248
- "learning_rate": 9.126752336448599e-06,
2249
- "loss": 5.3524,
2250
- "step": 3200
2251
- },
2252
- {
2253
- "epoch": 6.7154811715481175,
2254
- "grad_norm": 4.352437496185303,
2255
- "learning_rate": 8.980724299065422e-06,
2256
- "loss": 5.309,
2257
- "step": 3210
2258
- },
2259
- {
2260
- "epoch": 6.7364016736401675,
2261
- "grad_norm": 4.485301971435547,
2262
- "learning_rate": 8.834696261682243e-06,
2263
- "loss": 5.3386,
2264
- "step": 3220
2265
- },
2266
- {
2267
- "epoch": 6.7573221757322175,
2268
- "grad_norm": 4.056819438934326,
2269
- "learning_rate": 8.688668224299066e-06,
2270
- "loss": 5.333,
2271
- "step": 3230
2272
- },
2273
- {
2274
- "epoch": 6.7782426778242675,
2275
- "grad_norm": 4.242626190185547,
2276
- "learning_rate": 8.542640186915888e-06,
2277
- "loss": 5.2945,
2278
- "step": 3240
2279
- },
2280
- {
2281
- "epoch": 6.799163179916318,
2282
- "grad_norm": 4.980265140533447,
2283
- "learning_rate": 8.396612149532711e-06,
2284
- "loss": 5.3282,
2285
- "step": 3250
2286
- },
2287
- {
2288
- "epoch": 6.820083682008368,
2289
- "grad_norm": 4.217011451721191,
2290
- "learning_rate": 8.250584112149532e-06,
2291
- "loss": 5.3488,
2292
- "step": 3260
2293
- },
2294
- {
2295
- "epoch": 6.841004184100418,
2296
- "grad_norm": 4.251449108123779,
2297
- "learning_rate": 8.104556074766355e-06,
2298
- "loss": 5.3019,
2299
- "step": 3270
2300
- },
2301
- {
2302
- "epoch": 6.861924686192468,
2303
- "grad_norm": 4.524305820465088,
2304
- "learning_rate": 7.958528037383179e-06,
2305
- "loss": 5.3548,
2306
- "step": 3280
2307
- },
2308
- {
2309
- "epoch": 6.882845188284519,
2310
- "grad_norm": 4.697112083435059,
2311
- "learning_rate": 7.8125e-06,
2312
- "loss": 5.3154,
2313
- "step": 3290
2314
- },
2315
- {
2316
- "epoch": 6.903765690376569,
2317
- "grad_norm": 4.241893768310547,
2318
- "learning_rate": 7.666471962616823e-06,
2319
- "loss": 5.2998,
2320
- "step": 3300
2321
- },
2322
- {
2323
- "epoch": 6.924686192468619,
2324
- "grad_norm": 4.812580585479736,
2325
- "learning_rate": 7.520443925233646e-06,
2326
- "loss": 5.3335,
2327
- "step": 3310
2328
- },
2329
- {
2330
- "epoch": 6.945606694560669,
2331
- "grad_norm": 4.38694953918457,
2332
- "learning_rate": 7.374415887850468e-06,
2333
- "loss": 5.3257,
2334
- "step": 3320
2335
- },
2336
- {
2337
- "epoch": 6.96652719665272,
2338
- "grad_norm": 4.016705513000488,
2339
- "learning_rate": 7.22838785046729e-06,
2340
- "loss": 5.3378,
2341
- "step": 3330
2342
- },
2343
- {
2344
- "epoch": 6.98744769874477,
2345
- "grad_norm": 4.219436168670654,
2346
- "learning_rate": 7.082359813084112e-06,
2347
- "loss": 5.3092,
2348
- "step": 3340
2349
- },
2350
- {
2351
- "epoch": 7.00836820083682,
2352
- "grad_norm": 3.934561252593994,
2353
- "learning_rate": 6.936331775700936e-06,
2354
- "loss": 5.3018,
2355
- "step": 3350
2356
- },
2357
- {
2358
- "epoch": 7.02928870292887,
2359
- "grad_norm": 3.8431551456451416,
2360
- "learning_rate": 6.7903037383177575e-06,
2361
- "loss": 5.2902,
2362
- "step": 3360
2363
- },
2364
- {
2365
- "epoch": 7.050209205020921,
2366
- "grad_norm": 4.321191787719727,
2367
- "learning_rate": 6.644275700934579e-06,
2368
- "loss": 5.3228,
2369
- "step": 3370
2370
- },
2371
- {
2372
- "epoch": 7.071129707112971,
2373
- "grad_norm": 3.7915546894073486,
2374
- "learning_rate": 6.498247663551402e-06,
2375
- "loss": 5.2887,
2376
- "step": 3380
2377
- },
2378
- {
2379
- "epoch": 7.092050209205021,
2380
- "grad_norm": 3.980278491973877,
2381
- "learning_rate": 6.352219626168225e-06,
2382
- "loss": 5.2903,
2383
- "step": 3390
2384
- },
2385
- {
2386
- "epoch": 7.112970711297071,
2387
- "grad_norm": 3.8715929985046387,
2388
- "learning_rate": 6.2061915887850475e-06,
2389
- "loss": 5.2657,
2390
- "step": 3400
2391
- },
2392
- {
2393
- "epoch": 7.133891213389122,
2394
- "grad_norm": 4.0379958152771,
2395
- "learning_rate": 6.060163551401869e-06,
2396
- "loss": 5.2803,
2397
- "step": 3410
2398
- },
2399
- {
2400
- "epoch": 7.154811715481172,
2401
- "grad_norm": 4.571746826171875,
2402
- "learning_rate": 5.914135514018692e-06,
2403
- "loss": 5.2885,
2404
- "step": 3420
2405
- },
2406
- {
2407
- "epoch": 7.175732217573222,
2408
- "grad_norm": 4.301372528076172,
2409
- "learning_rate": 5.768107476635514e-06,
2410
- "loss": 5.2932,
2411
- "step": 3430
2412
- },
2413
- {
2414
- "epoch": 7.196652719665272,
2415
- "grad_norm": 4.016634941101074,
2416
- "learning_rate": 5.622079439252337e-06,
2417
- "loss": 5.2769,
2418
- "step": 3440
2419
- },
2420
- {
2421
- "epoch": 7.2175732217573225,
2422
- "grad_norm": 4.037191390991211,
2423
- "learning_rate": 5.4760514018691585e-06,
2424
- "loss": 5.2424,
2425
- "step": 3450
2426
- },
2427
- {
2428
- "epoch": 7.2384937238493725,
2429
- "grad_norm": 4.102557182312012,
2430
- "learning_rate": 5.330023364485982e-06,
2431
- "loss": 5.2526,
2432
- "step": 3460
2433
- },
2434
- {
2435
- "epoch": 7.2594142259414225,
2436
- "grad_norm": 3.814467191696167,
2437
- "learning_rate": 5.183995327102804e-06,
2438
- "loss": 5.2746,
2439
- "step": 3470
2440
- },
2441
- {
2442
- "epoch": 7.2803347280334725,
2443
- "grad_norm": 4.726773262023926,
2444
- "learning_rate": 5.037967289719627e-06,
2445
- "loss": 5.2754,
2446
- "step": 3480
2447
- },
2448
- {
2449
- "epoch": 7.301255230125523,
2450
- "grad_norm": 4.026674747467041,
2451
- "learning_rate": 4.8919392523364485e-06,
2452
- "loss": 5.2813,
2453
- "step": 3490
2454
- },
2455
- {
2456
- "epoch": 7.322175732217573,
2457
- "grad_norm": 4.189694404602051,
2458
- "learning_rate": 4.745911214953271e-06,
2459
- "loss": 5.3045,
2460
- "step": 3500
2461
- },
2462
- {
2463
- "epoch": 7.343096234309623,
2464
- "grad_norm": 3.8642845153808594,
2465
- "learning_rate": 4.599883177570094e-06,
2466
- "loss": 5.2797,
2467
- "step": 3510
2468
- },
2469
- {
2470
- "epoch": 7.364016736401673,
2471
- "grad_norm": 3.7599406242370605,
2472
- "learning_rate": 4.453855140186916e-06,
2473
- "loss": 5.2847,
2474
- "step": 3520
2475
- },
2476
- {
2477
- "epoch": 7.384937238493724,
2478
- "grad_norm": 4.120841979980469,
2479
- "learning_rate": 4.3078271028037385e-06,
2480
- "loss": 5.2656,
2481
- "step": 3530
2482
- },
2483
- {
2484
- "epoch": 7.405857740585774,
2485
- "grad_norm": 3.9323008060455322,
2486
- "learning_rate": 4.161799065420561e-06,
2487
- "loss": 5.2413,
2488
- "step": 3540
2489
- },
2490
- {
2491
- "epoch": 7.426778242677824,
2492
- "grad_norm": 4.041280269622803,
2493
- "learning_rate": 4.015771028037384e-06,
2494
- "loss": 5.2712,
2495
- "step": 3550
2496
- },
2497
- {
2498
- "epoch": 7.447698744769874,
2499
- "grad_norm": 3.7465624809265137,
2500
- "learning_rate": 3.869742990654206e-06,
2501
- "loss": 5.2625,
2502
- "step": 3560
2503
- },
2504
- {
2505
- "epoch": 7.468619246861925,
2506
- "grad_norm": 4.278369903564453,
2507
- "learning_rate": 3.7237149532710285e-06,
2508
- "loss": 5.2953,
2509
- "step": 3570
2510
- },
2511
- {
2512
- "epoch": 7.489539748953975,
2513
- "grad_norm": 4.383647918701172,
2514
- "learning_rate": 3.5776869158878503e-06,
2515
- "loss": 5.2594,
2516
- "step": 3580
2517
- },
2518
- {
2519
- "epoch": 7.510460251046025,
2520
- "grad_norm": 4.134693145751953,
2521
- "learning_rate": 3.4316588785046735e-06,
2522
- "loss": 5.2683,
2523
- "step": 3590
2524
- },
2525
- {
2526
- "epoch": 7.531380753138075,
2527
- "grad_norm": 4.535808563232422,
2528
- "learning_rate": 3.2856308411214953e-06,
2529
- "loss": 5.3007,
2530
- "step": 3600
2531
- },
2532
- {
2533
- "epoch": 7.552301255230126,
2534
- "grad_norm": 3.6642744541168213,
2535
- "learning_rate": 3.139602803738318e-06,
2536
- "loss": 5.2831,
2537
- "step": 3610
2538
- },
2539
- {
2540
- "epoch": 7.573221757322176,
2541
- "grad_norm": 4.104931354522705,
2542
- "learning_rate": 2.9935747663551403e-06,
2543
- "loss": 5.2811,
2544
- "step": 3620
2545
- },
2546
- {
2547
- "epoch": 7.594142259414226,
2548
- "grad_norm": 4.1342620849609375,
2549
- "learning_rate": 2.847546728971963e-06,
2550
- "loss": 5.2442,
2551
- "step": 3630
2552
- },
2553
- {
2554
- "epoch": 7.615062761506276,
2555
- "grad_norm": 4.103805065155029,
2556
- "learning_rate": 2.7015186915887853e-06,
2557
- "loss": 5.2564,
2558
- "step": 3640
2559
- },
2560
- {
2561
- "epoch": 7.635983263598327,
2562
- "grad_norm": 4.066782474517822,
2563
- "learning_rate": 2.5554906542056076e-06,
2564
- "loss": 5.2953,
2565
- "step": 3650
2566
- },
2567
- {
2568
- "epoch": 7.656903765690377,
2569
- "grad_norm": 4.145108222961426,
2570
- "learning_rate": 2.40946261682243e-06,
2571
- "loss": 5.2727,
2572
- "step": 3660
2573
- },
2574
- {
2575
- "epoch": 7.677824267782427,
2576
- "grad_norm": 4.3926005363464355,
2577
- "learning_rate": 2.2634345794392526e-06,
2578
- "loss": 5.3046,
2579
- "step": 3670
2580
- },
2581
- {
2582
- "epoch": 7.698744769874477,
2583
- "grad_norm": 4.0597429275512695,
2584
- "learning_rate": 2.117406542056075e-06,
2585
- "loss": 5.2608,
2586
- "step": 3680
2587
- },
2588
- {
2589
- "epoch": 7.7196652719665275,
2590
- "grad_norm": 4.5489912033081055,
2591
- "learning_rate": 1.971378504672897e-06,
2592
- "loss": 5.2954,
2593
- "step": 3690
2594
- },
2595
- {
2596
- "epoch": 7.7405857740585775,
2597
- "grad_norm": 4.182638645172119,
2598
- "learning_rate": 1.8253504672897197e-06,
2599
- "loss": 5.2748,
2600
- "step": 3700
2601
- },
2602
- {
2603
- "epoch": 7.7615062761506275,
2604
- "grad_norm": 4.147724628448486,
2605
- "learning_rate": 1.6793224299065422e-06,
2606
- "loss": 5.2677,
2607
- "step": 3710
2608
- },
2609
- {
2610
- "epoch": 7.7824267782426775,
2611
- "grad_norm": 3.8664207458496094,
2612
- "learning_rate": 1.5332943925233645e-06,
2613
- "loss": 5.2624,
2614
- "step": 3720
2615
- },
2616
- {
2617
- "epoch": 7.803347280334728,
2618
- "grad_norm": 3.98372483253479,
2619
- "learning_rate": 1.387266355140187e-06,
2620
- "loss": 5.2795,
2621
- "step": 3730
2622
- },
2623
- {
2624
- "epoch": 7.824267782426778,
2625
- "grad_norm": 3.8752615451812744,
2626
- "learning_rate": 1.2412383177570093e-06,
2627
- "loss": 5.2963,
2628
- "step": 3740
2629
- },
2630
- {
2631
- "epoch": 7.845188284518828,
2632
- "grad_norm": 4.161229610443115,
2633
- "learning_rate": 1.0952102803738318e-06,
2634
- "loss": 5.2632,
2635
- "step": 3750
2636
- },
2637
- {
2638
- "epoch": 7.866108786610878,
2639
- "grad_norm": 3.801501750946045,
2640
- "learning_rate": 9.491822429906542e-07,
2641
- "loss": 5.2939,
2642
- "step": 3760
2643
- },
2644
- {
2645
- "epoch": 7.887029288702929,
2646
- "grad_norm": 4.177527904510498,
2647
- "learning_rate": 8.031542056074766e-07,
2648
- "loss": 5.2627,
2649
- "step": 3770
2650
- },
2651
- {
2652
- "epoch": 7.907949790794979,
2653
- "grad_norm": 3.7887067794799805,
2654
- "learning_rate": 6.571261682242991e-07,
2655
- "loss": 5.3115,
2656
- "step": 3780
2657
- },
2658
- {
2659
- "epoch": 7.928870292887029,
2660
- "grad_norm": 4.616532325744629,
2661
- "learning_rate": 5.110981308411215e-07,
2662
- "loss": 5.3224,
2663
- "step": 3790
2664
- },
2665
- {
2666
- "epoch": 7.949790794979079,
2667
- "grad_norm": 4.093145847320557,
2668
- "learning_rate": 3.6507009345794396e-07,
2669
- "loss": 5.2779,
2670
- "step": 3800
2671
- },
2672
- {
2673
- "epoch": 7.97071129707113,
2674
- "grad_norm": 4.308866024017334,
2675
- "learning_rate": 2.1904205607476636e-07,
2676
- "loss": 5.2804,
2677
- "step": 3810
2678
- },
2679
- {
2680
- "epoch": 7.99163179916318,
2681
- "grad_norm": 4.583337306976318,
2682
- "learning_rate": 7.301401869158879e-08,
2683
- "loss": 5.2955,
2684
- "step": 3820
2685
- }
2686
- ],
2687
- "logging_steps": 10,
2688
- "max_steps": 3824,
2689
- "num_input_tokens_seen": 0,
2690
- "num_train_epochs": 8,
2691
- "save_steps": 1000,
2692
- "stateful_callbacks": {
2693
- "TrainerControl": {
2694
- "args": {
2695
- "should_epoch_stop": false,
2696
- "should_evaluate": false,
2697
- "should_log": false,
2698
- "should_save": true,
2699
- "should_training_stop": true
2700
- },
2701
- "attributes": {}
2702
- }
2703
- },
2704
- "total_flos": 0.0,
2705
- "train_batch_size": 48,
2706
- "trial_name": null,
2707
- "trial_params": null
2708
- }
pretrain/one_image_layer1_pretrain_3824/training_args.bin DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:a54cd3d791538223d9fc1081ed1ca5b8f6ffba2e47f177259b0b69e54309ef57
3
- size 5624
pretrain/one_image_layer4_pretrain_3824/config.json DELETED
@@ -1,54 +0,0 @@
1
- {
2
- "_vocab_size": 257216,
3
- "architectures": [
4
- "RoboPoint_Paligemma"
5
- ],
6
- "bos_token_id": 2,
7
- "eos_token_id": 1,
8
- "hidden_size": 2048,
9
- "image_token_index": 257152,
10
- "model_type": "paligemma",
11
- "pad_token_id": 0,
12
- "projection_dim": 2048,
13
- "text_config": {
14
- "attention_bias": false,
15
- "attention_dropout": 0.0,
16
- "head_dim": 256,
17
- "hidden_act": "gelu_pytorch_tanh",
18
- "hidden_activation": null,
19
- "hidden_size": 2048,
20
- "initializer_range": 0.02,
21
- "intermediate_size": 16384,
22
- "max_position_embeddings": 8192,
23
- "model_type": "gemma",
24
- "num_attention_heads": 8,
25
- "num_hidden_layers": 18,
26
- "num_image_tokens": 256,
27
- "num_key_value_heads": 1,
28
- "rms_norm_eps": 1e-06,
29
- "rope_theta": 10000.0,
30
- "torch_dtype": "bfloat16",
31
- "use_cache": true,
32
- "vocab_size": 257216
33
- },
34
- "torch_dtype": "bfloat16",
35
- "transformers_version": "4.51.3",
36
- "vision_config": {
37
- "attention_dropout": 0.0,
38
- "hidden_act": "gelu_pytorch_tanh",
39
- "hidden_size": 1152,
40
- "image_size": 224,
41
- "intermediate_size": 4304,
42
- "layer_norm_eps": 1e-06,
43
- "model_type": "siglip_vision_model",
44
- "num_attention_heads": 16,
45
- "num_channels": 3,
46
- "num_hidden_layers": 27,
47
- "num_image_tokens": 256,
48
- "patch_size": 14,
49
- "projection_dim": 2048,
50
- "projector_hidden_act": "gelu_fast",
51
- "torch_dtype": "bfloat16",
52
- "vision_use_head": false
53
- }
54
- }
pretrain/one_image_layer4_pretrain_3824/generation_config.json DELETED
@@ -1,7 +0,0 @@
1
- {
2
- "_from_model_config": true,
3
- "bos_token_id": 2,
4
- "eos_token_id": 1,
5
- "pad_token_id": 0,
6
- "transformers_version": "4.51.3"
7
- }
pretrain/one_image_layer4_pretrain_3824/model-00001-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:a8d986910849a2afad3df2ebfedcead5058e03e1ac98e3277f8410adf886047b
3
- size 4985048384
pretrain/one_image_layer4_pretrain_3824/model-00002-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:7e6f9df5044ca25ae2cf3a52ed1ea6692761b7614fffcf7f1a764dd5617e08d7
3
- size 2099036524
pretrain/one_image_layer4_pretrain_3824/model.safetensors.index.json DELETED
@@ -1,620 +0,0 @@
1
- {
2
- "metadata": {
3
- "total_size": 7083997556
4
- },
5
- "weight_map": {
6
- "module.language_model.model.embed_tokens.weight": "model-00001-of-00002.safetensors",
7
- "module.language_model.model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
8
- "module.language_model.model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
9
- "module.language_model.model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
10
- "module.language_model.model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
11
- "module.language_model.model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
12
- "module.language_model.model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
13
- "module.language_model.model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
14
- "module.language_model.model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
15
- "module.language_model.model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
16
- "module.language_model.model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
17
- "module.language_model.model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
18
- "module.language_model.model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
19
- "module.language_model.model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
20
- "module.language_model.model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
21
- "module.language_model.model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
22
- "module.language_model.model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
23
- "module.language_model.model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
24
- "module.language_model.model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
25
- "module.language_model.model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
26
- "module.language_model.model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
27
- "module.language_model.model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
28
- "module.language_model.model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
29
- "module.language_model.model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
30
- "module.language_model.model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
31
- "module.language_model.model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
32
- "module.language_model.model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
33
- "module.language_model.model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
34
- "module.language_model.model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
35
- "module.language_model.model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
36
- "module.language_model.model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
37
- "module.language_model.model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
38
- "module.language_model.model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
39
- "module.language_model.model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
40
- "module.language_model.model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
41
- "module.language_model.model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
42
- "module.language_model.model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
43
- "module.language_model.model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
44
- "module.language_model.model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
45
- "module.language_model.model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
46
- "module.language_model.model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
47
- "module.language_model.model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
48
- "module.language_model.model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
49
- "module.language_model.model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
50
- "module.language_model.model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
51
- "module.language_model.model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
52
- "module.language_model.model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
53
- "module.language_model.model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
54
- "module.language_model.model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
55
- "module.language_model.model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
56
- "module.language_model.model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
57
- "module.language_model.model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
58
- "module.language_model.model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
59
- "module.language_model.model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
60
- "module.language_model.model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
61
- "module.language_model.model.layers.14.input_layernorm.weight": "model-00002-of-00002.safetensors",
62
- "module.language_model.model.layers.14.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
63
- "module.language_model.model.layers.14.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
64
- "module.language_model.model.layers.14.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
65
- "module.language_model.model.layers.14.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
66
- "module.language_model.model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
67
- "module.language_model.model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
68
- "module.language_model.model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
69
- "module.language_model.model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
70
- "module.language_model.model.layers.15.input_layernorm.weight": "model-00002-of-00002.safetensors",
71
- "module.language_model.model.layers.15.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
72
- "module.language_model.model.layers.15.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
73
- "module.language_model.model.layers.15.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
74
- "module.language_model.model.layers.15.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
75
- "module.language_model.model.layers.15.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
76
- "module.language_model.model.layers.15.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
77
- "module.language_model.model.layers.15.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
78
- "module.language_model.model.layers.15.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
79
- "module.language_model.model.layers.16.input_layernorm.weight": "model-00002-of-00002.safetensors",
80
- "module.language_model.model.layers.16.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
81
- "module.language_model.model.layers.16.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
82
- "module.language_model.model.layers.16.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
83
- "module.language_model.model.layers.16.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
84
- "module.language_model.model.layers.16.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
85
- "module.language_model.model.layers.16.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
86
- "module.language_model.model.layers.16.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
87
- "module.language_model.model.layers.16.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
88
- "module.language_model.model.layers.17.input_layernorm.weight": "model-00002-of-00002.safetensors",
89
- "module.language_model.model.layers.17.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
90
- "module.language_model.model.layers.17.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
91
- "module.language_model.model.layers.17.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
92
- "module.language_model.model.layers.17.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
93
- "module.language_model.model.layers.17.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
94
- "module.language_model.model.layers.17.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
95
- "module.language_model.model.layers.17.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
96
- "module.language_model.model.layers.17.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
97
- "module.language_model.model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
98
- "module.language_model.model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
99
- "module.language_model.model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
100
- "module.language_model.model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
101
- "module.language_model.model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
102
- "module.language_model.model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
103
- "module.language_model.model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
104
- "module.language_model.model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
105
- "module.language_model.model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
106
- "module.language_model.model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
107
- "module.language_model.model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
108
- "module.language_model.model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
109
- "module.language_model.model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
110
- "module.language_model.model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
111
- "module.language_model.model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
112
- "module.language_model.model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
113
- "module.language_model.model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
114
- "module.language_model.model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
115
- "module.language_model.model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
116
- "module.language_model.model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
117
- "module.language_model.model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
118
- "module.language_model.model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
119
- "module.language_model.model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
120
- "module.language_model.model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
121
- "module.language_model.model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
122
- "module.language_model.model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
123
- "module.language_model.model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
124
- "module.language_model.model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
125
- "module.language_model.model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
126
- "module.language_model.model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
127
- "module.language_model.model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
128
- "module.language_model.model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
129
- "module.language_model.model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
130
- "module.language_model.model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
131
- "module.language_model.model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
132
- "module.language_model.model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
133
- "module.language_model.model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
134
- "module.language_model.model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
135
- "module.language_model.model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
136
- "module.language_model.model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
137
- "module.language_model.model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
138
- "module.language_model.model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
139
- "module.language_model.model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
140
- "module.language_model.model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
141
- "module.language_model.model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
142
- "module.language_model.model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
143
- "module.language_model.model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
144
- "module.language_model.model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
145
- "module.language_model.model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
146
- "module.language_model.model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
147
- "module.language_model.model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
148
- "module.language_model.model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
149
- "module.language_model.model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
150
- "module.language_model.model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
151
- "module.language_model.model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
152
- "module.language_model.model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
153
- "module.language_model.model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
154
- "module.language_model.model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
155
- "module.language_model.model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
156
- "module.language_model.model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
157
- "module.language_model.model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
158
- "module.language_model.model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
159
- "module.language_model.model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
160
- "module.language_model.model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
161
- "module.language_model.model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
162
- "module.language_model.model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
163
- "module.language_model.model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
164
- "module.language_model.model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
165
- "module.language_model.model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
166
- "module.language_model.model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
167
- "module.language_model.model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
168
- "module.language_model.model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
169
- "module.language_model.model.norm.weight": "model-00002-of-00002.safetensors",
170
- "module.multi_modal_projector.linear.bias": "model-00001-of-00002.safetensors",
171
- "module.multi_modal_projector.linear.weight": "model-00001-of-00002.safetensors",
172
- "module.up0.net_mask.0.bias": "model-00002-of-00002.safetensors",
173
- "module.up0.net_mask.0.weight": "model-00002-of-00002.safetensors",
174
- "module.up0.net_mask.2.bias": "model-00002-of-00002.safetensors",
175
- "module.up0.net_mask.2.weight": "model-00002-of-00002.safetensors",
176
- "module.up0.net_out.0.bias": "model-00002-of-00002.safetensors",
177
- "module.up0.net_out.0.weight": "model-00002-of-00002.safetensors",
178
- "module.up0.net_out.2.bias": "model-00002-of-00002.safetensors",
179
- "module.up0.net_out.2.weight": "model-00002-of-00002.safetensors",
180
- "module.up0.net_out.4.bias": "model-00002-of-00002.safetensors",
181
- "module.up0.net_out.4.weight": "model-00002-of-00002.safetensors",
182
- "module.vision_tower.vision_model.embeddings.patch_embedding.bias": "model-00001-of-00002.safetensors",
183
- "module.vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00001-of-00002.safetensors",
184
- "module.vision_tower.vision_model.embeddings.position_embedding.weight": "model-00001-of-00002.safetensors",
185
- "module.vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00001-of-00002.safetensors",
186
- "module.vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00001-of-00002.safetensors",
187
- "module.vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00001-of-00002.safetensors",
188
- "module.vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00001-of-00002.safetensors",
189
- "module.vision_tower.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00001-of-00002.safetensors",
190
- "module.vision_tower.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00001-of-00002.safetensors",
191
- "module.vision_tower.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00001-of-00002.safetensors",
192
- "module.vision_tower.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00001-of-00002.safetensors",
193
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
194
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
195
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
196
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
197
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
198
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
199
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
200
- "module.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
201
- "module.vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00001-of-00002.safetensors",
202
- "module.vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00001-of-00002.safetensors",
203
- "module.vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00001-of-00002.safetensors",
204
- "module.vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00001-of-00002.safetensors",
205
- "module.vision_tower.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00001-of-00002.safetensors",
206
- "module.vision_tower.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00001-of-00002.safetensors",
207
- "module.vision_tower.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00001-of-00002.safetensors",
208
- "module.vision_tower.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00001-of-00002.safetensors",
209
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
210
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
211
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
212
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
213
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
214
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
215
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
216
- "module.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
217
- "module.vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00001-of-00002.safetensors",
218
- "module.vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00001-of-00002.safetensors",
219
- "module.vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00001-of-00002.safetensors",
220
- "module.vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00001-of-00002.safetensors",
221
- "module.vision_tower.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00001-of-00002.safetensors",
222
- "module.vision_tower.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00001-of-00002.safetensors",
223
- "module.vision_tower.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00001-of-00002.safetensors",
224
- "module.vision_tower.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00001-of-00002.safetensors",
225
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
226
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
227
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
228
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
229
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
230
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
231
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
232
- "module.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
233
- "module.vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00001-of-00002.safetensors",
234
- "module.vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00001-of-00002.safetensors",
235
- "module.vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00001-of-00002.safetensors",
236
- "module.vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00001-of-00002.safetensors",
237
- "module.vision_tower.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00001-of-00002.safetensors",
238
- "module.vision_tower.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00001-of-00002.safetensors",
239
- "module.vision_tower.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00001-of-00002.safetensors",
240
- "module.vision_tower.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00001-of-00002.safetensors",
241
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
242
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
243
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
244
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
245
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
246
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
247
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
248
- "module.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
249
- "module.vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00001-of-00002.safetensors",
250
- "module.vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00001-of-00002.safetensors",
251
- "module.vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00001-of-00002.safetensors",
252
- "module.vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00001-of-00002.safetensors",
253
- "module.vision_tower.vision_model.encoder.layers.12.mlp.fc1.bias": "model-00001-of-00002.safetensors",
254
- "module.vision_tower.vision_model.encoder.layers.12.mlp.fc1.weight": "model-00001-of-00002.safetensors",
255
- "module.vision_tower.vision_model.encoder.layers.12.mlp.fc2.bias": "model-00001-of-00002.safetensors",
256
- "module.vision_tower.vision_model.encoder.layers.12.mlp.fc2.weight": "model-00001-of-00002.safetensors",
257
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
258
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
259
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
260
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
261
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
262
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
263
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
264
- "module.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
265
- "module.vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00001-of-00002.safetensors",
266
- "module.vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00001-of-00002.safetensors",
267
- "module.vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00001-of-00002.safetensors",
268
- "module.vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00001-of-00002.safetensors",
269
- "module.vision_tower.vision_model.encoder.layers.13.mlp.fc1.bias": "model-00001-of-00002.safetensors",
270
- "module.vision_tower.vision_model.encoder.layers.13.mlp.fc1.weight": "model-00001-of-00002.safetensors",
271
- "module.vision_tower.vision_model.encoder.layers.13.mlp.fc2.bias": "model-00001-of-00002.safetensors",
272
- "module.vision_tower.vision_model.encoder.layers.13.mlp.fc2.weight": "model-00001-of-00002.safetensors",
273
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
274
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
275
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
276
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
277
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
278
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
279
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
280
- "module.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
281
- "module.vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00001-of-00002.safetensors",
282
- "module.vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00001-of-00002.safetensors",
283
- "module.vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00001-of-00002.safetensors",
284
- "module.vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00001-of-00002.safetensors",
285
- "module.vision_tower.vision_model.encoder.layers.14.mlp.fc1.bias": "model-00001-of-00002.safetensors",
286
- "module.vision_tower.vision_model.encoder.layers.14.mlp.fc1.weight": "model-00001-of-00002.safetensors",
287
- "module.vision_tower.vision_model.encoder.layers.14.mlp.fc2.bias": "model-00001-of-00002.safetensors",
288
- "module.vision_tower.vision_model.encoder.layers.14.mlp.fc2.weight": "model-00001-of-00002.safetensors",
289
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
290
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
291
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
292
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
293
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
294
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
295
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
296
- "module.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
297
- "module.vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00001-of-00002.safetensors",
298
- "module.vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00001-of-00002.safetensors",
299
- "module.vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00001-of-00002.safetensors",
300
- "module.vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00001-of-00002.safetensors",
301
- "module.vision_tower.vision_model.encoder.layers.15.mlp.fc1.bias": "model-00001-of-00002.safetensors",
302
- "module.vision_tower.vision_model.encoder.layers.15.mlp.fc1.weight": "model-00001-of-00002.safetensors",
303
- "module.vision_tower.vision_model.encoder.layers.15.mlp.fc2.bias": "model-00001-of-00002.safetensors",
304
- "module.vision_tower.vision_model.encoder.layers.15.mlp.fc2.weight": "model-00001-of-00002.safetensors",
305
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
306
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
307
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
308
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
309
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
310
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
311
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
312
- "module.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
313
- "module.vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00001-of-00002.safetensors",
314
- "module.vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00001-of-00002.safetensors",
315
- "module.vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00001-of-00002.safetensors",
316
- "module.vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00001-of-00002.safetensors",
317
- "module.vision_tower.vision_model.encoder.layers.16.mlp.fc1.bias": "model-00001-of-00002.safetensors",
318
- "module.vision_tower.vision_model.encoder.layers.16.mlp.fc1.weight": "model-00001-of-00002.safetensors",
319
- "module.vision_tower.vision_model.encoder.layers.16.mlp.fc2.bias": "model-00001-of-00002.safetensors",
320
- "module.vision_tower.vision_model.encoder.layers.16.mlp.fc2.weight": "model-00001-of-00002.safetensors",
321
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
322
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
323
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
324
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
325
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
326
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
327
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
328
- "module.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
329
- "module.vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00001-of-00002.safetensors",
330
- "module.vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "model-00001-of-00002.safetensors",
331
- "module.vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00001-of-00002.safetensors",
332
- "module.vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00001-of-00002.safetensors",
333
- "module.vision_tower.vision_model.encoder.layers.17.mlp.fc1.bias": "model-00001-of-00002.safetensors",
334
- "module.vision_tower.vision_model.encoder.layers.17.mlp.fc1.weight": "model-00001-of-00002.safetensors",
335
- "module.vision_tower.vision_model.encoder.layers.17.mlp.fc2.bias": "model-00001-of-00002.safetensors",
336
- "module.vision_tower.vision_model.encoder.layers.17.mlp.fc2.weight": "model-00001-of-00002.safetensors",
337
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
338
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
339
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
340
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
341
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
342
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
343
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
344
- "module.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
345
- "module.vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00001-of-00002.safetensors",
346
- "module.vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00001-of-00002.safetensors",
347
- "module.vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00001-of-00002.safetensors",
348
- "module.vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00001-of-00002.safetensors",
349
- "module.vision_tower.vision_model.encoder.layers.18.mlp.fc1.bias": "model-00001-of-00002.safetensors",
350
- "module.vision_tower.vision_model.encoder.layers.18.mlp.fc1.weight": "model-00001-of-00002.safetensors",
351
- "module.vision_tower.vision_model.encoder.layers.18.mlp.fc2.bias": "model-00001-of-00002.safetensors",
352
- "module.vision_tower.vision_model.encoder.layers.18.mlp.fc2.weight": "model-00001-of-00002.safetensors",
353
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
354
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
355
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
356
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
357
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
358
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
359
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
360
- "module.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
361
- "module.vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00001-of-00002.safetensors",
362
- "module.vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00001-of-00002.safetensors",
363
- "module.vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00001-of-00002.safetensors",
364
- "module.vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00001-of-00002.safetensors",
365
- "module.vision_tower.vision_model.encoder.layers.19.mlp.fc1.bias": "model-00001-of-00002.safetensors",
366
- "module.vision_tower.vision_model.encoder.layers.19.mlp.fc1.weight": "model-00001-of-00002.safetensors",
367
- "module.vision_tower.vision_model.encoder.layers.19.mlp.fc2.bias": "model-00001-of-00002.safetensors",
368
- "module.vision_tower.vision_model.encoder.layers.19.mlp.fc2.weight": "model-00001-of-00002.safetensors",
369
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
370
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
371
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
372
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
373
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
374
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
375
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
376
- "module.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
377
- "module.vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00001-of-00002.safetensors",
378
- "module.vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00001-of-00002.safetensors",
379
- "module.vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00001-of-00002.safetensors",
380
- "module.vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00001-of-00002.safetensors",
381
- "module.vision_tower.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00001-of-00002.safetensors",
382
- "module.vision_tower.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00001-of-00002.safetensors",
383
- "module.vision_tower.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00001-of-00002.safetensors",
384
- "module.vision_tower.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00001-of-00002.safetensors",
385
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
386
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
387
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
388
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
389
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
390
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
391
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
392
- "module.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
393
- "module.vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00001-of-00002.safetensors",
394
- "module.vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00001-of-00002.safetensors",
395
- "module.vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00001-of-00002.safetensors",
396
- "module.vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "model-00001-of-00002.safetensors",
397
- "module.vision_tower.vision_model.encoder.layers.20.mlp.fc1.bias": "model-00001-of-00002.safetensors",
398
- "module.vision_tower.vision_model.encoder.layers.20.mlp.fc1.weight": "model-00001-of-00002.safetensors",
399
- "module.vision_tower.vision_model.encoder.layers.20.mlp.fc2.bias": "model-00001-of-00002.safetensors",
400
- "module.vision_tower.vision_model.encoder.layers.20.mlp.fc2.weight": "model-00001-of-00002.safetensors",
401
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
402
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
403
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
404
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
405
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
406
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
407
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
408
- "module.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
409
- "module.vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": "model-00001-of-00002.safetensors",
410
- "module.vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00001-of-00002.safetensors",
411
- "module.vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00001-of-00002.safetensors",
412
- "module.vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00001-of-00002.safetensors",
413
- "module.vision_tower.vision_model.encoder.layers.21.mlp.fc1.bias": "model-00001-of-00002.safetensors",
414
- "module.vision_tower.vision_model.encoder.layers.21.mlp.fc1.weight": "model-00001-of-00002.safetensors",
415
- "module.vision_tower.vision_model.encoder.layers.21.mlp.fc2.bias": "model-00001-of-00002.safetensors",
416
- "module.vision_tower.vision_model.encoder.layers.21.mlp.fc2.weight": "model-00001-of-00002.safetensors",
417
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
418
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
419
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
420
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
421
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
422
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
423
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
424
- "module.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
425
- "module.vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00001-of-00002.safetensors",
426
- "module.vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00001-of-00002.safetensors",
427
- "module.vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00001-of-00002.safetensors",
428
- "module.vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00001-of-00002.safetensors",
429
- "module.vision_tower.vision_model.encoder.layers.22.mlp.fc1.bias": "model-00001-of-00002.safetensors",
430
- "module.vision_tower.vision_model.encoder.layers.22.mlp.fc1.weight": "model-00001-of-00002.safetensors",
431
- "module.vision_tower.vision_model.encoder.layers.22.mlp.fc2.bias": "model-00001-of-00002.safetensors",
432
- "module.vision_tower.vision_model.encoder.layers.22.mlp.fc2.weight": "model-00001-of-00002.safetensors",
433
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
434
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
435
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
436
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
437
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
438
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
439
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
440
- "module.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
441
- "module.vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00001-of-00002.safetensors",
442
- "module.vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00001-of-00002.safetensors",
443
- "module.vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00001-of-00002.safetensors",
444
- "module.vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00001-of-00002.safetensors",
445
- "module.vision_tower.vision_model.encoder.layers.23.mlp.fc1.bias": "model-00001-of-00002.safetensors",
446
- "module.vision_tower.vision_model.encoder.layers.23.mlp.fc1.weight": "model-00001-of-00002.safetensors",
447
- "module.vision_tower.vision_model.encoder.layers.23.mlp.fc2.bias": "model-00001-of-00002.safetensors",
448
- "module.vision_tower.vision_model.encoder.layers.23.mlp.fc2.weight": "model-00001-of-00002.safetensors",
449
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
450
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
451
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
452
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
453
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
454
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
455
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
456
- "module.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
457
- "module.vision_tower.vision_model.encoder.layers.24.layer_norm1.bias": "model-00001-of-00002.safetensors",
458
- "module.vision_tower.vision_model.encoder.layers.24.layer_norm1.weight": "model-00001-of-00002.safetensors",
459
- "module.vision_tower.vision_model.encoder.layers.24.layer_norm2.bias": "model-00001-of-00002.safetensors",
460
- "module.vision_tower.vision_model.encoder.layers.24.layer_norm2.weight": "model-00001-of-00002.safetensors",
461
- "module.vision_tower.vision_model.encoder.layers.24.mlp.fc1.bias": "model-00001-of-00002.safetensors",
462
- "module.vision_tower.vision_model.encoder.layers.24.mlp.fc1.weight": "model-00001-of-00002.safetensors",
463
- "module.vision_tower.vision_model.encoder.layers.24.mlp.fc2.bias": "model-00001-of-00002.safetensors",
464
- "module.vision_tower.vision_model.encoder.layers.24.mlp.fc2.weight": "model-00001-of-00002.safetensors",
465
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
466
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
467
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
468
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
469
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
470
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
471
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
472
- "module.vision_tower.vision_model.encoder.layers.24.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
473
- "module.vision_tower.vision_model.encoder.layers.25.layer_norm1.bias": "model-00001-of-00002.safetensors",
474
- "module.vision_tower.vision_model.encoder.layers.25.layer_norm1.weight": "model-00001-of-00002.safetensors",
475
- "module.vision_tower.vision_model.encoder.layers.25.layer_norm2.bias": "model-00001-of-00002.safetensors",
476
- "module.vision_tower.vision_model.encoder.layers.25.layer_norm2.weight": "model-00001-of-00002.safetensors",
477
- "module.vision_tower.vision_model.encoder.layers.25.mlp.fc1.bias": "model-00001-of-00002.safetensors",
478
- "module.vision_tower.vision_model.encoder.layers.25.mlp.fc1.weight": "model-00001-of-00002.safetensors",
479
- "module.vision_tower.vision_model.encoder.layers.25.mlp.fc2.bias": "model-00001-of-00002.safetensors",
480
- "module.vision_tower.vision_model.encoder.layers.25.mlp.fc2.weight": "model-00001-of-00002.safetensors",
481
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
482
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
483
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
484
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
485
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
486
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
487
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
488
- "module.vision_tower.vision_model.encoder.layers.25.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
489
- "module.vision_tower.vision_model.encoder.layers.26.layer_norm1.bias": "model-00001-of-00002.safetensors",
490
- "module.vision_tower.vision_model.encoder.layers.26.layer_norm1.weight": "model-00001-of-00002.safetensors",
491
- "module.vision_tower.vision_model.encoder.layers.26.layer_norm2.bias": "model-00001-of-00002.safetensors",
492
- "module.vision_tower.vision_model.encoder.layers.26.layer_norm2.weight": "model-00001-of-00002.safetensors",
493
- "module.vision_tower.vision_model.encoder.layers.26.mlp.fc1.bias": "model-00001-of-00002.safetensors",
494
- "module.vision_tower.vision_model.encoder.layers.26.mlp.fc1.weight": "model-00001-of-00002.safetensors",
495
- "module.vision_tower.vision_model.encoder.layers.26.mlp.fc2.bias": "model-00001-of-00002.safetensors",
496
- "module.vision_tower.vision_model.encoder.layers.26.mlp.fc2.weight": "model-00001-of-00002.safetensors",
497
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
498
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
499
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
500
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
501
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
502
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
503
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
504
- "module.vision_tower.vision_model.encoder.layers.26.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
505
- "module.vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00001-of-00002.safetensors",
506
- "module.vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00001-of-00002.safetensors",
507
- "module.vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00001-of-00002.safetensors",
508
- "module.vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00001-of-00002.safetensors",
509
- "module.vision_tower.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00001-of-00002.safetensors",
510
- "module.vision_tower.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00001-of-00002.safetensors",
511
- "module.vision_tower.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00001-of-00002.safetensors",
512
- "module.vision_tower.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00001-of-00002.safetensors",
513
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
514
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
515
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
516
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
517
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
518
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
519
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
520
- "module.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
521
- "module.vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00001-of-00002.safetensors",
522
- "module.vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00001-of-00002.safetensors",
523
- "module.vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00001-of-00002.safetensors",
524
- "module.vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "model-00001-of-00002.safetensors",
525
- "module.vision_tower.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00001-of-00002.safetensors",
526
- "module.vision_tower.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00001-of-00002.safetensors",
527
- "module.vision_tower.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00001-of-00002.safetensors",
528
- "module.vision_tower.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00001-of-00002.safetensors",
529
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
530
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
531
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
532
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
533
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
534
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
535
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
536
- "module.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
537
- "module.vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00001-of-00002.safetensors",
538
- "module.vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00001-of-00002.safetensors",
539
- "module.vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00001-of-00002.safetensors",
540
- "module.vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00001-of-00002.safetensors",
541
- "module.vision_tower.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00001-of-00002.safetensors",
542
- "module.vision_tower.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00001-of-00002.safetensors",
543
- "module.vision_tower.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00001-of-00002.safetensors",
544
- "module.vision_tower.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00001-of-00002.safetensors",
545
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
546
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
547
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
548
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
549
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
550
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
551
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
552
- "module.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
553
- "module.vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00001-of-00002.safetensors",
554
- "module.vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00001-of-00002.safetensors",
555
- "module.vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00001-of-00002.safetensors",
556
- "module.vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00001-of-00002.safetensors",
557
- "module.vision_tower.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00001-of-00002.safetensors",
558
- "module.vision_tower.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00001-of-00002.safetensors",
559
- "module.vision_tower.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00001-of-00002.safetensors",
560
- "module.vision_tower.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00001-of-00002.safetensors",
561
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
562
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
563
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
564
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
565
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
566
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
567
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
568
- "module.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
569
- "module.vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00001-of-00002.safetensors",
570
- "module.vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00001-of-00002.safetensors",
571
- "module.vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00001-of-00002.safetensors",
572
- "module.vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00001-of-00002.safetensors",
573
- "module.vision_tower.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00001-of-00002.safetensors",
574
- "module.vision_tower.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00001-of-00002.safetensors",
575
- "module.vision_tower.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00001-of-00002.safetensors",
576
- "module.vision_tower.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00001-of-00002.safetensors",
577
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
578
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
579
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
580
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
581
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
582
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
583
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
584
- "module.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
585
- "module.vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00001-of-00002.safetensors",
586
- "module.vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00001-of-00002.safetensors",
587
- "module.vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00001-of-00002.safetensors",
588
- "module.vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00001-of-00002.safetensors",
589
- "module.vision_tower.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00001-of-00002.safetensors",
590
- "module.vision_tower.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00001-of-00002.safetensors",
591
- "module.vision_tower.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00001-of-00002.safetensors",
592
- "module.vision_tower.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00001-of-00002.safetensors",
593
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
594
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
595
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
596
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
597
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
598
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
599
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
600
- "module.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
601
- "module.vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00001-of-00002.safetensors",
602
- "module.vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00001-of-00002.safetensors",
603
- "module.vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00001-of-00002.safetensors",
604
- "module.vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00001-of-00002.safetensors",
605
- "module.vision_tower.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00001-of-00002.safetensors",
606
- "module.vision_tower.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00001-of-00002.safetensors",
607
- "module.vision_tower.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00001-of-00002.safetensors",
608
- "module.vision_tower.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00001-of-00002.safetensors",
609
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
610
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
611
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00001-of-00002.safetensors",
612
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00001-of-00002.safetensors",
613
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
614
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
615
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
616
- "module.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
617
- "module.vision_tower.vision_model.post_layernorm.bias": "model-00001-of-00002.safetensors",
618
- "module.vision_tower.vision_model.post_layernorm.weight": "model-00001-of-00002.safetensors"
619
- }
620
- }
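Note: the deleted model.safetensors.index.json above appears to follow the standard Hugging Face sharded-checkpoint layout, mapping each parameter name (here carrying a "module." prefix) to the shard file that stores it. A minimal sketch of how such an index is typically consumed, assuming that standard layout — the tensor name and shard file come from the entries above, everything else is illustrative:

import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

# "weight_map" maps parameter names to shard files (assumed standard index layout)
weight_map = index["weight_map"]
name = "module.language_model.model.norm.weight"
shard = weight_map[name]  # "model-00002-of-00002.safetensors" per the entries above

# Load only that tensor from the correct shard, without reading the whole checkpoint
with safe_open(shard, framework="pt", device="cpu") as f:
    tensor = f.get_tensor(name)
print(name, tuple(tensor.shape))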
 
pretrain/one_image_layer4_pretrain_3824/rng_state_0.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:3ee195ebde9bf012f945f068f133e7fe22fef5450c496607e3ef11cc2034a186
3
- size 15984
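Note: the three-line blocks deleted here are git-LFS pointer files — each records only the LFS spec version, the object's sha256, and its size in bytes, while the actual .pth blob lives in LFS storage. A hedged sketch of checking a fetched blob against such a pointer (the file name is illustrative; the hash and size are the ones shown above for rng_state_0.pth):

import hashlib

def matches_pointer(blob_path, expected_sha256, expected_size):
    # Stream the blob so large files do not have to fit in memory
    h = hashlib.sha256()
    size = 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            size += len(chunk)
    return h.hexdigest() == expected_sha256 and size == expected_size

# Example with the values from the deleted pointer above:
# matches_pointer("rng_state_0.pth",
#                 "3ee195ebde9bf012f945f068f133e7fe22fef5450c496607e3ef11cc2034a186",
#                 15984)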
 
 
 
 
pretrain/one_image_layer4_pretrain_3824/rng_state_1.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:4c5aa7a36c64701b647fb2121298f95ea81b8534c49340aef7115cfe5813f215
3
- size 15920
 
 
 
 
pretrain/one_image_layer4_pretrain_3824/rng_state_2.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:3ecc7a6507625983a175b39a25b874b0daa93e57e2878b66b44161722717adb9
3
- size 15920
 
 
 
 
pretrain/one_image_layer4_pretrain_3824/rng_state_3.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:fd85380aaf3a2b229e71ba6779d40797431662e65fd98354898f93110b8c8599
3
- size 15920
 
 
 
 
pretrain/one_image_layer4_pretrain_3824/rng_state_4.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:17c78776402f62ad362e175b0adb6333c63cc325c13ea9acc996799ee2b72998
3
- size 15920
 
 
 
 
pretrain/one_image_layer4_pretrain_3824/rng_state_5.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:e38c4a1329b40fbbf59c9bfe31e7f8f4d423b0f795efe741115685c773627812
3
- size 15920
 
 
 
 
pretrain/one_image_layer4_pretrain_3824/rng_state_6.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:232e5823d58632b2bd4f0a3eaed918a00d1f4a7ad6e6cfd86785c5645c2ea0ab
3
- size 15920
 
 
 
 
pretrain/one_image_layer4_pretrain_3824/rng_state_7.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:c01740a56a86b482251295ab8c6ab97ad9b353e9d7a86b07de2c27911a8dacb7
3
- size 15920
 
 
 
 
pretrain/one_image_layer4_pretrain_3824/scheduler.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:5f9fb39460522fb2411b8e76bc9c22c5f74c649cb79a004c3dc7f9ef8bac6436
3
- size 1000
 
 
 
 
pretrain/one_image_layer4_pretrain_3824/trainer_state.json DELETED
@@ -1,2708 +0,0 @@
1
- {
2
- "best_global_step": null,
3
- "best_metric": null,
4
- "best_model_checkpoint": null,
5
- "epoch": 8.0,
6
- "eval_steps": 500,
7
- "global_step": 3824,
8
- "is_hyper_param_search": false,
9
- "is_local_process_zero": true,
10
- "is_world_process_zero": true,
11
- "log_history": [
12
- {
13
- "epoch": 0.02092050209205021,
14
- "grad_norm": 2892.645263671875,
15
- "learning_rate": 1.125e-06,
16
- "loss": 192.2722,
17
- "step": 10
18
- },
19
- {
20
- "epoch": 0.04184100418410042,
21
- "grad_norm": 2489.1162109375,
22
- "learning_rate": 2.375e-06,
23
- "loss": 180.8469,
24
- "step": 20
25
- },
26
- {
27
- "epoch": 0.06276150627615062,
28
- "grad_norm": 2149.276123046875,
29
- "learning_rate": 3.625e-06,
30
- "loss": 155.2738,
31
- "step": 30
32
- },
33
- {
34
- "epoch": 0.08368200836820083,
35
- "grad_norm": 1601.8026123046875,
36
- "learning_rate": 4.875000000000001e-06,
37
- "loss": 126.1161,
38
- "step": 40
39
- },
40
- {
41
- "epoch": 0.10460251046025104,
42
- "grad_norm": 1006.3649291992188,
43
- "learning_rate": 6.125e-06,
44
- "loss": 95.9051,
45
- "step": 50
46
- },
47
- {
48
- "epoch": 0.12552301255230125,
49
- "grad_norm": 796.5686645507812,
50
- "learning_rate": 7.375e-06,
51
- "loss": 67.7612,
52
- "step": 60
53
- },
54
- {
55
- "epoch": 0.14644351464435146,
56
- "grad_norm": 665.6900024414062,
57
- "learning_rate": 8.625e-06,
58
- "loss": 45.068,
59
- "step": 70
60
- },
61
- {
62
- "epoch": 0.16736401673640167,
63
- "grad_norm": 1340.7882080078125,
64
- "learning_rate": 9.875000000000001e-06,
65
- "loss": 28.4763,
66
- "step": 80
67
- },
68
- {
69
- "epoch": 0.18828451882845187,
70
- "grad_norm": 458.8875427246094,
71
- "learning_rate": 1.1125000000000001e-05,
72
- "loss": 19.2898,
73
- "step": 90
74
- },
75
- {
76
- "epoch": 0.20920502092050208,
77
- "grad_norm": 864.3143310546875,
78
- "learning_rate": 1.2375000000000001e-05,
79
- "loss": 16.0594,
80
- "step": 100
81
- },
82
- {
83
- "epoch": 0.2301255230125523,
84
- "grad_norm": 300.05926513671875,
85
- "learning_rate": 1.3625e-05,
86
- "loss": 14.0882,
87
- "step": 110
88
- },
89
- {
90
- "epoch": 0.2510460251046025,
91
- "grad_norm": 373.1422424316406,
92
- "learning_rate": 1.4875e-05,
93
- "loss": 17.1558,
94
- "step": 120
95
- },
96
- {
97
- "epoch": 0.2719665271966527,
98
- "grad_norm": 3154.36962890625,
99
- "learning_rate": 1.6125000000000002e-05,
100
- "loss": 14.8119,
101
- "step": 130
102
- },
103
- {
104
- "epoch": 0.2928870292887029,
105
- "grad_norm": 346.9271545410156,
106
- "learning_rate": 1.7375e-05,
107
- "loss": 14.6105,
108
- "step": 140
109
- },
110
- {
111
- "epoch": 0.3138075313807531,
112
- "grad_norm": 1485.8084716796875,
113
- "learning_rate": 1.8625000000000002e-05,
114
- "loss": 12.9114,
115
- "step": 150
116
- },
117
- {
118
- "epoch": 0.33472803347280333,
119
- "grad_norm": 171.4276580810547,
120
- "learning_rate": 1.9875000000000002e-05,
121
- "loss": 12.2833,
122
- "step": 160
123
- },
124
- {
125
- "epoch": 0.35564853556485354,
126
- "grad_norm": 155.4971160888672,
127
- "learning_rate": 2.1125000000000002e-05,
128
- "loss": 12.1211,
129
- "step": 170
130
- },
131
- {
132
- "epoch": 0.37656903765690375,
133
- "grad_norm": 715.6805419921875,
134
- "learning_rate": 2.2375000000000002e-05,
135
- "loss": 11.2117,
136
- "step": 180
137
- },
138
- {
139
- "epoch": 0.39748953974895396,
140
- "grad_norm": 500.795654296875,
141
- "learning_rate": 2.3624999999999998e-05,
142
- "loss": 10.4978,
143
- "step": 190
144
- },
145
- {
146
- "epoch": 0.41841004184100417,
147
- "grad_norm": 146.5669403076172,
148
- "learning_rate": 2.4875e-05,
149
- "loss": 10.1568,
150
- "step": 200
151
- },
152
- {
153
- "epoch": 0.4393305439330544,
154
- "grad_norm": 125.15879821777344,
155
- "learning_rate": 2.6124999999999998e-05,
156
- "loss": 9.7391,
157
- "step": 210
158
- },
159
- {
160
- "epoch": 0.4602510460251046,
161
- "grad_norm": 594.5401611328125,
162
- "learning_rate": 2.7375e-05,
163
- "loss": 9.778,
164
- "step": 220
165
- },
166
- {
167
- "epoch": 0.4811715481171548,
168
- "grad_norm": 628.0665893554688,
169
- "learning_rate": 2.8625e-05,
170
- "loss": 9.5704,
171
- "step": 230
172
- },
173
- {
174
- "epoch": 0.502092050209205,
175
- "grad_norm": 69.65560913085938,
176
- "learning_rate": 2.9875000000000004e-05,
177
- "loss": 9.2453,
178
- "step": 240
179
- },
180
- {
181
- "epoch": 0.5230125523012552,
182
- "grad_norm": 79.89998626708984,
183
- "learning_rate": 3.1125000000000004e-05,
184
- "loss": 9.1022,
185
- "step": 250
186
- },
187
- {
188
- "epoch": 0.5439330543933054,
189
- "grad_norm": 201.17601013183594,
190
- "learning_rate": 3.2375e-05,
191
- "loss": 8.7854,
192
- "step": 260
193
- },
194
- {
195
- "epoch": 0.5648535564853556,
196
- "grad_norm": 42.98260498046875,
197
- "learning_rate": 3.3625000000000004e-05,
198
- "loss": 8.6803,
199
- "step": 270
200
- },
201
- {
202
- "epoch": 0.5857740585774058,
203
- "grad_norm": 84.59324645996094,
204
- "learning_rate": 3.4875e-05,
205
- "loss": 8.6771,
206
- "step": 280
207
- },
208
- {
209
- "epoch": 0.606694560669456,
210
- "grad_norm": 74.30402374267578,
211
- "learning_rate": 3.6125000000000004e-05,
212
- "loss": 8.5578,
213
- "step": 290
214
- },
215
- {
216
- "epoch": 0.6276150627615062,
217
- "grad_norm": 34.094974517822266,
218
- "learning_rate": 3.737500000000001e-05,
219
- "loss": 8.5831,
220
- "step": 300
221
- },
222
- {
223
- "epoch": 0.6485355648535565,
224
- "grad_norm": 129.0697021484375,
225
- "learning_rate": 3.8625e-05,
226
- "loss": 8.57,
227
- "step": 310
228
- },
229
- {
230
- "epoch": 0.6694560669456067,
231
- "grad_norm": 134.49334716796875,
232
- "learning_rate": 3.9875e-05,
233
- "loss": 8.6515,
234
- "step": 320
235
- },
236
- {
237
- "epoch": 0.6903765690376569,
238
- "grad_norm": 53.58937072753906,
239
- "learning_rate": 4.1125000000000004e-05,
240
- "loss": 8.5106,
241
- "step": 330
242
- },
243
- {
244
- "epoch": 0.7112970711297071,
245
- "grad_norm": 32.3502197265625,
246
- "learning_rate": 4.237500000000001e-05,
247
- "loss": 8.3622,
248
- "step": 340
249
- },
250
- {
251
- "epoch": 0.7322175732217573,
252
- "grad_norm": 28.943572998046875,
253
- "learning_rate": 4.3625e-05,
254
- "loss": 8.3568,
255
- "step": 350
256
- },
257
- {
258
- "epoch": 0.7531380753138075,
259
- "grad_norm": 21.737060546875,
260
- "learning_rate": 4.4875e-05,
261
- "loss": 8.166,
262
- "step": 360
263
- },
264
- {
265
- "epoch": 0.7740585774058577,
266
- "grad_norm": 53.52946853637695,
267
- "learning_rate": 4.6125e-05,
268
- "loss": 8.0704,
269
- "step": 370
270
- },
271
- {
272
- "epoch": 0.7949790794979079,
273
- "grad_norm": 35.16305923461914,
274
- "learning_rate": 4.7375e-05,
275
- "loss": 8.048,
276
- "step": 380
277
- },
278
- {
279
- "epoch": 0.8158995815899581,
280
- "grad_norm": 25.73539161682129,
281
- "learning_rate": 4.8625e-05,
282
- "loss": 7.9689,
283
- "step": 390
284
- },
285
- {
286
- "epoch": 0.8368200836820083,
287
- "grad_norm": 27.108898162841797,
288
- "learning_rate": 4.9875000000000006e-05,
289
- "loss": 7.9264,
290
- "step": 400
291
- },
292
- {
293
- "epoch": 0.8577405857740585,
294
- "grad_norm": 16.6348876953125,
295
- "learning_rate": 4.986857476635514e-05,
296
- "loss": 7.9187,
297
- "step": 410
298
- },
299
- {
300
- "epoch": 0.8786610878661087,
301
- "grad_norm": 26.402711868286133,
302
- "learning_rate": 4.9722546728971967e-05,
303
- "loss": 7.8116,
304
- "step": 420
305
- },
306
- {
307
- "epoch": 0.899581589958159,
308
- "grad_norm": 20.655763626098633,
309
- "learning_rate": 4.9576518691588786e-05,
310
- "loss": 7.7288,
311
- "step": 430
312
- },
313
- {
314
- "epoch": 0.9205020920502092,
315
- "grad_norm": 18.80156135559082,
316
- "learning_rate": 4.9430490654205605e-05,
317
- "loss": 7.6716,
318
- "step": 440
319
- },
320
- {
321
- "epoch": 0.9414225941422594,
322
- "grad_norm": 16.861543655395508,
323
- "learning_rate": 4.928446261682243e-05,
324
- "loss": 7.6445,
325
- "step": 450
326
- },
327
- {
328
- "epoch": 0.9623430962343096,
329
- "grad_norm": 15.155502319335938,
330
- "learning_rate": 4.913843457943925e-05,
331
- "loss": 7.6161,
332
- "step": 460
333
- },
334
- {
335
- "epoch": 0.9832635983263598,
336
- "grad_norm": 18.212966918945312,
337
- "learning_rate": 4.899240654205608e-05,
338
- "loss": 7.5998,
339
- "step": 470
340
- },
341
- {
342
- "epoch": 1.00418410041841,
343
- "grad_norm": 15.107539176940918,
344
- "learning_rate": 4.88463785046729e-05,
345
- "loss": 7.5322,
346
- "step": 480
347
- },
348
- {
349
- "epoch": 1.0251046025104602,
350
- "grad_norm": 14.914057731628418,
351
- "learning_rate": 4.870035046728972e-05,
352
- "loss": 7.3889,
353
- "step": 490
354
- },
355
- {
356
- "epoch": 1.0460251046025104,
357
- "grad_norm": 15.722795486450195,
358
- "learning_rate": 4.855432242990655e-05,
359
- "loss": 7.3274,
360
- "step": 500
361
- },
362
- {
363
- "epoch": 1.0669456066945606,
364
- "grad_norm": 17.27610969543457,
365
- "learning_rate": 4.840829439252337e-05,
366
- "loss": 7.3273,
367
- "step": 510
368
- },
369
- {
370
- "epoch": 1.0878661087866108,
371
- "grad_norm": 12.244239807128906,
372
- "learning_rate": 4.826226635514019e-05,
373
- "loss": 7.2689,
374
- "step": 520
375
- },
376
- {
377
- "epoch": 1.108786610878661,
378
- "grad_norm": 13.549488067626953,
379
- "learning_rate": 4.811623831775701e-05,
380
- "loss": 7.2727,
381
- "step": 530
382
- },
383
- {
384
- "epoch": 1.1297071129707112,
385
- "grad_norm": 14.482577323913574,
386
- "learning_rate": 4.797021028037383e-05,
387
- "loss": 7.2993,
388
- "step": 540
389
- },
390
- {
391
- "epoch": 1.1506276150627615,
392
- "grad_norm": 14.188762664794922,
393
- "learning_rate": 4.782418224299066e-05,
394
- "loss": 7.2722,
395
- "step": 550
396
- },
397
- {
398
- "epoch": 1.1715481171548117,
399
- "grad_norm": 13.765031814575195,
400
- "learning_rate": 4.767815420560748e-05,
401
- "loss": 7.1693,
402
- "step": 560
403
- },
404
- {
405
- "epoch": 1.1924686192468619,
406
- "grad_norm": 11.574164390563965,
407
- "learning_rate": 4.75321261682243e-05,
408
- "loss": 7.1825,
409
- "step": 570
410
- },
411
- {
412
- "epoch": 1.213389121338912,
413
- "grad_norm": 16.059511184692383,
414
- "learning_rate": 4.738609813084112e-05,
415
- "loss": 7.2044,
416
- "step": 580
417
- },
418
- {
419
- "epoch": 1.2343096234309623,
420
- "grad_norm": 10.73707103729248,
421
- "learning_rate": 4.724007009345794e-05,
422
- "loss": 7.1773,
423
- "step": 590
424
- },
425
- {
426
- "epoch": 1.2552301255230125,
427
- "grad_norm": 12.161375999450684,
428
- "learning_rate": 4.709404205607477e-05,
429
- "loss": 7.1352,
430
- "step": 600
431
- },
432
- {
433
- "epoch": 1.2761506276150627,
434
- "grad_norm": 12.38882064819336,
435
- "learning_rate": 4.694801401869159e-05,
436
- "loss": 7.1451,
437
- "step": 610
438
- },
439
- {
440
- "epoch": 1.297071129707113,
441
- "grad_norm": 16.27951431274414,
442
- "learning_rate": 4.6801985981308414e-05,
443
- "loss": 7.1066,
444
- "step": 620
445
- },
446
- {
447
- "epoch": 1.3179916317991631,
448
- "grad_norm": 11.027902603149414,
449
- "learning_rate": 4.665595794392524e-05,
450
- "loss": 7.0284,
451
- "step": 630
452
- },
453
- {
454
- "epoch": 1.3389121338912133,
455
- "grad_norm": 13.414423942565918,
456
- "learning_rate": 4.650992990654206e-05,
457
- "loss": 7.0589,
458
- "step": 640
459
- },
460
- {
461
- "epoch": 1.3598326359832635,
462
- "grad_norm": 10.195273399353027,
463
- "learning_rate": 4.6363901869158886e-05,
464
- "loss": 7.0349,
465
- "step": 650
466
- },
467
- {
468
- "epoch": 1.3807531380753137,
469
- "grad_norm": 12.190611839294434,
470
- "learning_rate": 4.6217873831775705e-05,
471
- "loss": 7.0636,
472
- "step": 660
473
- },
474
- {
475
- "epoch": 1.401673640167364,
476
- "grad_norm": 11.348795890808105,
477
- "learning_rate": 4.6071845794392524e-05,
478
- "loss": 7.0307,
479
- "step": 670
480
- },
481
- {
482
- "epoch": 1.4225941422594142,
483
- "grad_norm": 16.071359634399414,
484
- "learning_rate": 4.592581775700935e-05,
485
- "loss": 7.0205,
486
- "step": 680
487
- },
488
- {
489
- "epoch": 1.4435146443514644,
490
- "grad_norm": 11.531207084655762,
491
- "learning_rate": 4.577978971962617e-05,
492
- "loss": 7.0528,
493
- "step": 690
494
- },
495
- {
496
- "epoch": 1.4644351464435146,
497
- "grad_norm": 8.887657165527344,
498
- "learning_rate": 4.5633761682242996e-05,
499
- "loss": 6.9423,
500
- "step": 700
501
- },
502
- {
503
- "epoch": 1.4853556485355648,
504
- "grad_norm": 12.700973510742188,
505
- "learning_rate": 4.5487733644859815e-05,
506
- "loss": 6.9055,
507
- "step": 710
508
- },
509
- {
510
- "epoch": 1.506276150627615,
511
- "grad_norm": 9.196358680725098,
512
- "learning_rate": 4.5341705607476634e-05,
513
- "loss": 7.0065,
514
- "step": 720
515
- },
516
- {
517
- "epoch": 1.5271966527196654,
518
- "grad_norm": 13.546262741088867,
519
- "learning_rate": 4.519567757009346e-05,
520
- "loss": 6.9184,
521
- "step": 730
522
- },
523
- {
524
- "epoch": 1.5481171548117154,
525
- "grad_norm": 9.600874900817871,
526
- "learning_rate": 4.504964953271028e-05,
527
- "loss": 6.9053,
528
- "step": 740
529
- },
530
- {
531
- "epoch": 1.5690376569037658,
532
- "grad_norm": 11.587425231933594,
533
- "learning_rate": 4.4903621495327106e-05,
534
- "loss": 6.9315,
535
- "step": 750
536
- },
537
- {
538
- "epoch": 1.5899581589958158,
539
- "grad_norm": 9.545685768127441,
540
- "learning_rate": 4.4757593457943925e-05,
541
- "loss": 6.9654,
542
- "step": 760
543
- },
544
- {
545
- "epoch": 1.6108786610878663,
546
- "grad_norm": 9.259549140930176,
547
- "learning_rate": 4.461156542056075e-05,
548
- "loss": 6.892,
549
- "step": 770
550
- },
551
- {
552
- "epoch": 1.6317991631799162,
553
- "grad_norm": 9.985551834106445,
554
- "learning_rate": 4.446553738317758e-05,
555
- "loss": 6.8576,
556
- "step": 780
557
- },
558
- {
559
- "epoch": 1.6527196652719667,
560
- "grad_norm": 10.179107666015625,
561
- "learning_rate": 4.43195093457944e-05,
562
- "loss": 6.8912,
563
- "step": 790
564
- },
565
- {
566
- "epoch": 1.6736401673640167,
567
- "grad_norm": 8.945125579833984,
568
- "learning_rate": 4.4173481308411216e-05,
569
- "loss": 6.8148,
570
- "step": 800
571
- },
572
- {
573
- "epoch": 1.694560669456067,
574
- "grad_norm": 10.720569610595703,
575
- "learning_rate": 4.402745327102804e-05,
576
- "loss": 6.8843,
577
- "step": 810
578
- },
579
- {
580
- "epoch": 1.715481171548117,
581
- "grad_norm": 8.655113220214844,
582
- "learning_rate": 4.388142523364486e-05,
583
- "loss": 6.8585,
584
- "step": 820
585
- },
586
- {
587
- "epoch": 1.7364016736401675,
588
- "grad_norm": 10.961562156677246,
589
- "learning_rate": 4.373539719626169e-05,
590
- "loss": 6.8428,
591
- "step": 830
592
- },
593
- {
594
- "epoch": 1.7573221757322175,
595
- "grad_norm": 8.046053886413574,
596
- "learning_rate": 4.358936915887851e-05,
597
- "loss": 6.7643,
598
- "step": 840
599
- },
600
- {
601
- "epoch": 1.778242677824268,
602
- "grad_norm": 9.744955062866211,
603
- "learning_rate": 4.3443341121495326e-05,
604
- "loss": 6.8291,
605
- "step": 850
606
- },
607
- {
608
- "epoch": 1.799163179916318,
609
- "grad_norm": 9.127840995788574,
610
- "learning_rate": 4.329731308411215e-05,
611
- "loss": 6.7762,
612
- "step": 860
613
- },
614
- {
615
- "epoch": 1.8200836820083683,
616
- "grad_norm": 8.581077575683594,
617
- "learning_rate": 4.315128504672897e-05,
618
- "loss": 6.6851,
619
- "step": 870
620
- },
621
- {
622
- "epoch": 1.8410041841004183,
623
- "grad_norm": 9.594968795776367,
624
- "learning_rate": 4.30052570093458e-05,
625
- "loss": 6.767,
626
- "step": 880
627
- },
628
- {
629
- "epoch": 1.8619246861924688,
630
- "grad_norm": 9.343400955200195,
631
- "learning_rate": 4.285922897196262e-05,
632
- "loss": 6.7971,
633
- "step": 890
634
- },
635
- {
636
- "epoch": 1.8828451882845187,
637
- "grad_norm": 8.668853759765625,
638
- "learning_rate": 4.2713200934579436e-05,
639
- "loss": 6.7351,
640
- "step": 900
641
- },
642
- {
643
- "epoch": 1.9037656903765692,
644
- "grad_norm": 8.734278678894043,
645
- "learning_rate": 4.256717289719626e-05,
646
- "loss": 6.7335,
647
- "step": 910
648
- },
649
- {
650
- "epoch": 1.9246861924686192,
651
- "grad_norm": 9.673972129821777,
652
- "learning_rate": 4.242114485981308e-05,
653
- "loss": 6.7721,
654
- "step": 920
655
- },
656
- {
657
- "epoch": 1.9456066945606696,
658
- "grad_norm": 8.5903902053833,
659
- "learning_rate": 4.227511682242991e-05,
660
- "loss": 6.7273,
661
- "step": 930
662
- },
663
- {
664
- "epoch": 1.9665271966527196,
665
- "grad_norm": 8.411205291748047,
666
- "learning_rate": 4.2129088785046734e-05,
667
- "loss": 6.6956,
668
- "step": 940
669
- },
670
- {
671
- "epoch": 1.98744769874477,
672
- "grad_norm": 9.401385307312012,
673
- "learning_rate": 4.198306074766355e-05,
674
- "loss": 6.7187,
675
- "step": 950
676
- },
677
- {
678
- "epoch": 2.00836820083682,
679
- "grad_norm": 9.34141731262207,
680
- "learning_rate": 4.183703271028038e-05,
681
- "loss": 6.5779,
682
- "step": 960
683
- },
684
- {
685
- "epoch": 2.0292887029288704,
686
- "grad_norm": 10.119585990905762,
687
- "learning_rate": 4.16910046728972e-05,
688
- "loss": 6.348,
689
- "step": 970
690
- },
691
- {
692
- "epoch": 2.0502092050209204,
693
- "grad_norm": 8.02117919921875,
694
- "learning_rate": 4.154497663551402e-05,
695
- "loss": 6.3786,
696
- "step": 980
697
- },
698
- {
699
- "epoch": 2.071129707112971,
700
- "grad_norm": 9.365386962890625,
701
- "learning_rate": 4.1398948598130844e-05,
702
- "loss": 6.3379,
703
- "step": 990
704
- },
705
- {
706
- "epoch": 2.092050209205021,
707
- "grad_norm": 8.408126831054688,
708
- "learning_rate": 4.1252920560747664e-05,
709
- "loss": 6.2594,
710
- "step": 1000
711
- },
712
- {
713
- "epoch": 2.1129707112970713,
714
- "grad_norm": 9.55907917022705,
715
- "learning_rate": 4.110689252336449e-05,
716
- "loss": 6.3537,
717
- "step": 1010
718
- },
719
- {
720
- "epoch": 2.1338912133891212,
721
- "grad_norm": 9.469207763671875,
722
- "learning_rate": 4.096086448598131e-05,
723
- "loss": 6.3571,
724
- "step": 1020
725
- },
726
- {
727
- "epoch": 2.1548117154811717,
728
- "grad_norm": 7.865747451782227,
729
- "learning_rate": 4.081483644859813e-05,
730
- "loss": 6.3416,
731
- "step": 1030
732
- },
733
- {
734
- "epoch": 2.1757322175732217,
735
- "grad_norm": 7.543406963348389,
736
- "learning_rate": 4.0668808411214954e-05,
737
- "loss": 6.2591,
738
- "step": 1040
739
- },
740
- {
741
- "epoch": 2.196652719665272,
742
- "grad_norm": 7.795525550842285,
743
- "learning_rate": 4.0522780373831774e-05,
744
- "loss": 6.3203,
745
- "step": 1050
746
- },
747
- {
748
- "epoch": 2.217573221757322,
749
- "grad_norm": 7.388299942016602,
750
- "learning_rate": 4.03767523364486e-05,
751
- "loss": 6.3065,
752
- "step": 1060
753
- },
754
- {
755
- "epoch": 2.2384937238493725,
756
- "grad_norm": 9.478675842285156,
757
- "learning_rate": 4.023072429906542e-05,
758
- "loss": 6.3233,
759
- "step": 1070
760
- },
761
- {
762
- "epoch": 2.2594142259414225,
763
- "grad_norm": 8.189627647399902,
764
- "learning_rate": 4.0084696261682245e-05,
765
- "loss": 6.3194,
766
- "step": 1080
767
- },
768
- {
769
- "epoch": 2.280334728033473,
770
- "grad_norm": 8.942906379699707,
771
- "learning_rate": 3.993866822429907e-05,
772
- "loss": 6.3037,
773
- "step": 1090
774
- },
775
- {
776
- "epoch": 2.301255230125523,
777
- "grad_norm": 7.5349531173706055,
778
- "learning_rate": 3.979264018691589e-05,
779
- "loss": 6.3382,
780
- "step": 1100
781
- },
782
- {
783
- "epoch": 2.3221757322175733,
784
- "grad_norm": 9.456353187561035,
785
- "learning_rate": 3.964661214953272e-05,
786
- "loss": 6.3286,
787
- "step": 1110
788
- },
789
- {
790
- "epoch": 2.3430962343096233,
791
- "grad_norm": 9.879541397094727,
792
- "learning_rate": 3.9500584112149536e-05,
793
- "loss": 6.2711,
794
- "step": 1120
795
- },
796
- {
797
- "epoch": 2.3640167364016738,
798
- "grad_norm": 7.1197333335876465,
799
- "learning_rate": 3.9354556074766355e-05,
800
- "loss": 6.3526,
801
- "step": 1130
802
- },
803
- {
804
- "epoch": 2.3849372384937237,
805
- "grad_norm": 9.088199615478516,
806
- "learning_rate": 3.920852803738318e-05,
807
- "loss": 6.2993,
808
- "step": 1140
809
- },
810
- {
811
- "epoch": 2.405857740585774,
812
- "grad_norm": 9.107407569885254,
813
- "learning_rate": 3.90625e-05,
814
- "loss": 6.2867,
815
- "step": 1150
816
- },
817
- {
818
- "epoch": 2.426778242677824,
819
- "grad_norm": 7.08643913269043,
820
- "learning_rate": 3.891647196261683e-05,
821
- "loss": 6.2747,
822
- "step": 1160
823
- },
824
- {
825
- "epoch": 2.4476987447698746,
826
- "grad_norm": 10.01321792602539,
827
- "learning_rate": 3.8770443925233646e-05,
828
- "loss": 6.2636,
829
- "step": 1170
830
- },
831
- {
832
- "epoch": 2.4686192468619246,
833
- "grad_norm": 7.717972278594971,
834
- "learning_rate": 3.8624415887850466e-05,
835
- "loss": 6.293,
836
- "step": 1180
837
- },
838
- {
839
- "epoch": 2.489539748953975,
840
- "grad_norm": 7.490470886230469,
841
- "learning_rate": 3.847838785046729e-05,
842
- "loss": 6.2688,
843
- "step": 1190
844
- },
845
- {
846
- "epoch": 2.510460251046025,
847
- "grad_norm": 10.63131332397461,
848
- "learning_rate": 3.833235981308411e-05,
849
- "loss": 6.308,
850
- "step": 1200
851
- },
852
- {
853
- "epoch": 2.5313807531380754,
854
- "grad_norm": 7.755945682525635,
855
- "learning_rate": 3.818633177570094e-05,
856
- "loss": 6.2759,
857
- "step": 1210
858
- },
859
- {
860
- "epoch": 2.5523012552301254,
861
- "grad_norm": 9.319307327270508,
862
- "learning_rate": 3.8040303738317756e-05,
863
- "loss": 6.2482,
864
- "step": 1220
865
- },
866
- {
867
- "epoch": 2.573221757322176,
868
- "grad_norm": 9.639796257019043,
869
- "learning_rate": 3.789427570093458e-05,
870
- "loss": 6.2637,
871
- "step": 1230
872
- },
873
- {
874
- "epoch": 2.594142259414226,
875
- "grad_norm": 7.218570709228516,
876
- "learning_rate": 3.774824766355141e-05,
877
- "loss": 6.2721,
878
- "step": 1240
879
- },
880
- {
881
- "epoch": 2.6150627615062763,
882
- "grad_norm": 8.478962898254395,
883
- "learning_rate": 3.760221962616823e-05,
884
- "loss": 6.2577,
885
- "step": 1250
886
- },
887
- {
888
- "epoch": 2.6359832635983262,
889
- "grad_norm": 8.476099014282227,
890
- "learning_rate": 3.745619158878505e-05,
891
- "loss": 6.2207,
892
- "step": 1260
893
- },
894
- {
895
- "epoch": 2.6569037656903767,
896
- "grad_norm": 10.189292907714844,
897
- "learning_rate": 3.731016355140187e-05,
898
- "loss": 6.2125,
899
- "step": 1270
900
- },
901
- {
902
- "epoch": 2.6778242677824267,
903
- "grad_norm": 8.161721229553223,
904
- "learning_rate": 3.716413551401869e-05,
905
- "loss": 6.2357,
906
- "step": 1280
907
- },
908
- {
909
- "epoch": 2.698744769874477,
910
- "grad_norm": 6.979925155639648,
911
- "learning_rate": 3.701810747663552e-05,
912
- "loss": 6.2222,
913
- "step": 1290
914
- },
915
- {
916
- "epoch": 2.719665271966527,
917
- "grad_norm": 7.450411796569824,
918
- "learning_rate": 3.687207943925234e-05,
919
- "loss": 6.2231,
920
- "step": 1300
921
- },
922
- {
923
- "epoch": 2.7405857740585775,
924
- "grad_norm": 8.66945743560791,
925
- "learning_rate": 3.672605140186916e-05,
926
- "loss": 6.2537,
927
- "step": 1310
928
- },
929
- {
930
- "epoch": 2.7615062761506275,
931
- "grad_norm": 9.507169723510742,
932
- "learning_rate": 3.6580023364485984e-05,
933
- "loss": 6.2562,
934
- "step": 1320
935
- },
936
- {
937
- "epoch": 2.782426778242678,
938
- "grad_norm": 8.572477340698242,
939
- "learning_rate": 3.64339953271028e-05,
940
- "loss": 6.2061,
941
- "step": 1330
942
- },
943
- {
944
- "epoch": 2.803347280334728,
945
- "grad_norm": 9.34544849395752,
946
- "learning_rate": 3.628796728971963e-05,
947
- "loss": 6.2286,
948
- "step": 1340
949
- },
950
- {
951
- "epoch": 2.8242677824267783,
952
- "grad_norm": 6.928922653198242,
953
- "learning_rate": 3.614193925233645e-05,
954
- "loss": 6.1866,
955
- "step": 1350
956
- },
957
- {
958
- "epoch": 2.8451882845188283,
959
- "grad_norm": 7.335088729858398,
960
- "learning_rate": 3.599591121495327e-05,
961
- "loss": 6.1943,
962
- "step": 1360
963
- },
964
- {
965
- "epoch": 2.8661087866108788,
966
- "grad_norm": 7.386289119720459,
967
- "learning_rate": 3.5849883177570094e-05,
968
- "loss": 6.2077,
969
- "step": 1370
970
- },
971
- {
972
- "epoch": 2.8870292887029287,
973
- "grad_norm": 7.754251480102539,
974
- "learning_rate": 3.570385514018692e-05,
975
- "loss": 6.2244,
976
- "step": 1380
977
- },
978
- {
979
- "epoch": 2.907949790794979,
980
- "grad_norm": 7.074305057525635,
981
- "learning_rate": 3.555782710280374e-05,
982
- "loss": 6.1476,
983
- "step": 1390
984
- },
985
- {
986
- "epoch": 2.928870292887029,
987
- "grad_norm": 8.084433555603027,
988
- "learning_rate": 3.5411799065420565e-05,
989
- "loss": 6.1445,
990
- "step": 1400
991
- },
992
- {
993
- "epoch": 2.9497907949790796,
994
- "grad_norm": 9.687230110168457,
995
- "learning_rate": 3.5265771028037385e-05,
996
- "loss": 6.1787,
997
- "step": 1410
998
- },
999
- {
1000
- "epoch": 2.9707112970711296,
1001
- "grad_norm": 7.559036731719971,
1002
- "learning_rate": 3.511974299065421e-05,
1003
- "loss": 6.1879,
1004
- "step": 1420
1005
- },
1006
- {
1007
- "epoch": 2.99163179916318,
1008
- "grad_norm": 7.462767124176025,
1009
- "learning_rate": 3.497371495327103e-05,
1010
- "loss": 6.236,
1011
- "step": 1430
1012
- },
1013
- {
1014
- "epoch": 3.01255230125523,
1015
- "grad_norm": 6.79940128326416,
1016
- "learning_rate": 3.4827686915887856e-05,
1017
- "loss": 6.0333,
1018
- "step": 1440
1019
- },
1020
- {
1021
- "epoch": 3.0334728033472804,
1022
- "grad_norm": 6.548897743225098,
1023
- "learning_rate": 3.4681658878504675e-05,
1024
- "loss": 5.9014,
1025
- "step": 1450
1026
- },
1027
- {
1028
- "epoch": 3.0543933054393304,
1029
- "grad_norm": 6.509470462799072,
1030
- "learning_rate": 3.4535630841121495e-05,
1031
- "loss": 5.8679,
1032
- "step": 1460
1033
- },
1034
- {
1035
- "epoch": 3.075313807531381,
1036
- "grad_norm": 6.476815700531006,
1037
- "learning_rate": 3.438960280373832e-05,
1038
- "loss": 5.8759,
1039
- "step": 1470
1040
- },
1041
- {
1042
- "epoch": 3.096234309623431,
1043
- "grad_norm": 6.345800399780273,
1044
- "learning_rate": 3.424357476635514e-05,
1045
- "loss": 5.8719,
1046
- "step": 1480
1047
- },
1048
- {
1049
- "epoch": 3.1171548117154813,
1050
- "grad_norm": 7.304464340209961,
1051
- "learning_rate": 3.4097546728971966e-05,
1052
- "loss": 5.8334,
1053
- "step": 1490
1054
- },
1055
- {
1056
- "epoch": 3.1380753138075312,
1057
- "grad_norm": 6.38894510269165,
1058
- "learning_rate": 3.3951518691588786e-05,
1059
- "loss": 5.9223,
1060
- "step": 1500
1061
- },
1062
- {
1063
- "epoch": 3.1589958158995817,
1064
- "grad_norm": 7.105144500732422,
1065
- "learning_rate": 3.3805490654205605e-05,
1066
- "loss": 5.8514,
1067
- "step": 1510
1068
- },
1069
- {
1070
- "epoch": 3.1799163179916317,
1071
- "grad_norm": 7.784251689910889,
1072
- "learning_rate": 3.365946261682243e-05,
1073
- "loss": 5.8741,
1074
- "step": 1520
1075
- },
1076
- {
1077
- "epoch": 3.200836820083682,
1078
- "grad_norm": 6.41173791885376,
1079
- "learning_rate": 3.351343457943925e-05,
1080
- "loss": 5.8883,
1081
- "step": 1530
1082
- },
1083
- {
1084
- "epoch": 3.221757322175732,
1085
- "grad_norm": 7.181066513061523,
1086
- "learning_rate": 3.3367406542056076e-05,
1087
- "loss": 5.9113,
1088
- "step": 1540
1089
- },
1090
- {
1091
- "epoch": 3.2426778242677825,
1092
- "grad_norm": 8.183653831481934,
1093
- "learning_rate": 3.32213785046729e-05,
1094
- "loss": 5.8483,
1095
- "step": 1550
1096
- },
1097
- {
1098
- "epoch": 3.2635983263598325,
1099
- "grad_norm": 8.355222702026367,
1100
- "learning_rate": 3.307535046728972e-05,
1101
- "loss": 5.8591,
1102
- "step": 1560
1103
- },
1104
- {
1105
- "epoch": 3.284518828451883,
1106
- "grad_norm": 6.041286468505859,
1107
- "learning_rate": 3.292932242990655e-05,
1108
- "loss": 5.8794,
1109
- "step": 1570
1110
- },
1111
- {
1112
- "epoch": 3.305439330543933,
1113
- "grad_norm": 6.80169677734375,
1114
- "learning_rate": 3.278329439252337e-05,
1115
- "loss": 5.8728,
1116
- "step": 1580
1117
- },
1118
- {
1119
- "epoch": 3.3263598326359833,
1120
- "grad_norm": 7.62702751159668,
1121
- "learning_rate": 3.2637266355140187e-05,
1122
- "loss": 5.8677,
1123
- "step": 1590
1124
- },
1125
- {
1126
- "epoch": 3.3472803347280333,
1127
- "grad_norm": 6.874912738800049,
1128
- "learning_rate": 3.249123831775701e-05,
1129
- "loss": 5.8925,
1130
- "step": 1600
1131
- },
1132
- {
1133
- "epoch": 3.3682008368200838,
1134
- "grad_norm": 7.741762161254883,
1135
- "learning_rate": 3.234521028037383e-05,
1136
- "loss": 5.88,
1137
- "step": 1610
1138
- },
1139
- {
1140
- "epoch": 3.3891213389121337,
1141
- "grad_norm": 7.18850564956665,
1142
- "learning_rate": 3.219918224299066e-05,
1143
- "loss": 5.8732,
1144
- "step": 1620
1145
- },
1146
- {
1147
- "epoch": 3.410041841004184,
1148
- "grad_norm": 6.788341045379639,
1149
- "learning_rate": 3.205315420560748e-05,
1150
- "loss": 5.9006,
1151
- "step": 1630
1152
- },
1153
- {
1154
- "epoch": 3.430962343096234,
1155
- "grad_norm": 6.941610336303711,
1156
- "learning_rate": 3.19071261682243e-05,
1157
- "loss": 5.8629,
1158
- "step": 1640
1159
- },
1160
- {
1161
- "epoch": 3.4518828451882846,
1162
- "grad_norm": 6.032546043395996,
1163
- "learning_rate": 3.176109813084112e-05,
1164
- "loss": 5.8498,
1165
- "step": 1650
1166
- },
1167
- {
1168
- "epoch": 3.4728033472803346,
1169
- "grad_norm": 6.58389949798584,
1170
- "learning_rate": 3.161507009345794e-05,
1171
- "loss": 5.8916,
1172
- "step": 1660
1173
- },
1174
- {
1175
- "epoch": 3.493723849372385,
1176
- "grad_norm": 6.209228515625,
1177
- "learning_rate": 3.146904205607477e-05,
1178
- "loss": 5.837,
1179
- "step": 1670
1180
- },
1181
- {
1182
- "epoch": 3.514644351464435,
1183
- "grad_norm": 7.131625175476074,
1184
- "learning_rate": 3.132301401869159e-05,
1185
- "loss": 5.8811,
1186
- "step": 1680
1187
- },
1188
- {
1189
- "epoch": 3.5355648535564854,
1190
- "grad_norm": 8.637256622314453,
1191
- "learning_rate": 3.1176985981308414e-05,
1192
- "loss": 5.8718,
1193
- "step": 1690
1194
- },
1195
- {
1196
- "epoch": 3.5564853556485354,
1197
- "grad_norm": 7.244524002075195,
1198
- "learning_rate": 3.103095794392524e-05,
1199
- "loss": 5.9001,
1200
- "step": 1700
1201
- },
1202
- {
1203
- "epoch": 3.577405857740586,
1204
- "grad_norm": 5.958163261413574,
1205
- "learning_rate": 3.088492990654206e-05,
1206
- "loss": 5.85,
1207
- "step": 1710
1208
- },
1209
- {
1210
- "epoch": 3.598326359832636,
1211
- "grad_norm": 7.176133632659912,
1212
- "learning_rate": 3.073890186915888e-05,
1213
- "loss": 5.8519,
1214
- "step": 1720
1215
- },
1216
- {
1217
- "epoch": 3.6192468619246863,
1218
- "grad_norm": 7.154526710510254,
1219
- "learning_rate": 3.0592873831775705e-05,
1220
- "loss": 5.8795,
1221
- "step": 1730
1222
- },
1223
- {
1224
- "epoch": 3.6401673640167362,
1225
- "grad_norm": 6.325348377227783,
1226
- "learning_rate": 3.0446845794392527e-05,
1227
- "loss": 5.8531,
1228
- "step": 1740
1229
- },
1230
- {
1231
- "epoch": 3.6610878661087867,
1232
- "grad_norm": 8.396771430969238,
1233
- "learning_rate": 3.0300817757009347e-05,
1234
- "loss": 5.8396,
1235
- "step": 1750
1236
- },
1237
- {
1238
- "epoch": 3.6820083682008367,
1239
- "grad_norm": 6.478153705596924,
1240
- "learning_rate": 3.015478971962617e-05,
1241
- "loss": 5.8739,
1242
- "step": 1760
1243
- },
1244
- {
1245
- "epoch": 3.702928870292887,
1246
- "grad_norm": 6.151018142700195,
1247
- "learning_rate": 3.0008761682242992e-05,
1248
- "loss": 5.8618,
1249
- "step": 1770
1250
- },
1251
- {
1252
- "epoch": 3.723849372384937,
1253
- "grad_norm": 6.827515125274658,
1254
- "learning_rate": 2.9862733644859815e-05,
1255
- "loss": 5.8579,
1256
- "step": 1780
1257
- },
1258
- {
1259
- "epoch": 3.7447698744769875,
1260
- "grad_norm": 6.936313152313232,
1261
- "learning_rate": 2.9716705607476637e-05,
1262
- "loss": 5.828,
1263
- "step": 1790
1264
- },
1265
- {
1266
- "epoch": 3.7656903765690375,
1267
- "grad_norm": 6.858672142028809,
1268
- "learning_rate": 2.9570677570093457e-05,
1269
- "loss": 5.7797,
1270
- "step": 1800
1271
- },
1272
- {
1273
- "epoch": 3.786610878661088,
1274
- "grad_norm": 6.23930549621582,
1275
- "learning_rate": 2.942464953271028e-05,
1276
- "loss": 5.8288,
1277
- "step": 1810
1278
- },
1279
- {
1280
- "epoch": 3.8075313807531384,
1281
- "grad_norm": 5.978524208068848,
1282
- "learning_rate": 2.9278621495327102e-05,
1283
- "loss": 5.864,
1284
- "step": 1820
1285
- },
1286
- {
1287
- "epoch": 3.8284518828451883,
1288
- "grad_norm": 6.331532955169678,
1289
- "learning_rate": 2.9132593457943925e-05,
1290
- "loss": 5.8326,
1291
- "step": 1830
1292
- },
1293
- {
1294
- "epoch": 3.8493723849372383,
1295
- "grad_norm": 6.491503715515137,
1296
- "learning_rate": 2.898656542056075e-05,
1297
- "loss": 5.8421,
1298
- "step": 1840
1299
- },
1300
- {
1301
- "epoch": 3.8702928870292888,
1302
- "grad_norm": 7.954758167266846,
1303
- "learning_rate": 2.8840537383177574e-05,
1304
- "loss": 5.8402,
1305
- "step": 1850
1306
- },
1307
- {
1308
- "epoch": 3.891213389121339,
1309
- "grad_norm": 6.846292018890381,
1310
- "learning_rate": 2.8694509345794396e-05,
1311
- "loss": 5.8259,
1312
- "step": 1860
1313
- },
1314
- {
1315
- "epoch": 3.912133891213389,
1316
- "grad_norm": 7.5431671142578125,
1317
- "learning_rate": 2.854848130841122e-05,
1318
- "loss": 5.8546,
1319
- "step": 1870
1320
- },
1321
- {
1322
- "epoch": 3.933054393305439,
1323
- "grad_norm": 6.461066246032715,
1324
- "learning_rate": 2.840245327102804e-05,
1325
- "loss": 5.8609,
1326
- "step": 1880
1327
- },
1328
- {
1329
- "epoch": 3.9539748953974896,
1330
- "grad_norm": 6.202910423278809,
1331
- "learning_rate": 2.825642523364486e-05,
1332
- "loss": 5.8621,
1333
- "step": 1890
1334
- },
1335
- {
1336
- "epoch": 3.97489539748954,
1337
- "grad_norm": 7.479728698730469,
1338
- "learning_rate": 2.8110397196261684e-05,
1339
- "loss": 5.808,
1340
- "step": 1900
1341
- },
1342
- {
1343
- "epoch": 3.99581589958159,
1344
- "grad_norm": 6.868503570556641,
1345
- "learning_rate": 2.7964369158878507e-05,
1346
- "loss": 5.8582,
1347
- "step": 1910
1348
- },
1349
- {
1350
- "epoch": 4.01673640167364,
1351
- "grad_norm": 5.496687412261963,
1352
- "learning_rate": 2.781834112149533e-05,
1353
- "loss": 5.7099,
1354
- "step": 1920
1355
- },
1356
- {
1357
- "epoch": 4.03765690376569,
1358
- "grad_norm": 6.652906894683838,
1359
- "learning_rate": 2.767231308411215e-05,
1360
- "loss": 5.6679,
1361
- "step": 1930
1362
- },
1363
- {
1364
- "epoch": 4.058577405857741,
1365
- "grad_norm": 5.448336601257324,
1366
- "learning_rate": 2.752628504672897e-05,
1367
- "loss": 5.696,
1368
- "step": 1940
1369
- },
1370
- {
1371
- "epoch": 4.079497907949791,
1372
- "grad_norm": 5.418632507324219,
1373
- "learning_rate": 2.7380257009345794e-05,
1374
- "loss": 5.6242,
1375
- "step": 1950
1376
- },
1377
- {
1378
- "epoch": 4.100418410041841,
1379
- "grad_norm": 5.651141166687012,
1380
- "learning_rate": 2.7234228971962617e-05,
1381
- "loss": 5.6568,
1382
- "step": 1960
1383
- },
1384
- {
1385
- "epoch": 4.121338912133891,
1386
- "grad_norm": 5.76585578918457,
1387
- "learning_rate": 2.708820093457944e-05,
1388
- "loss": 5.633,
1389
- "step": 1970
1390
- },
1391
- {
1392
- "epoch": 4.142259414225942,
1393
- "grad_norm": 6.460199356079102,
1394
- "learning_rate": 2.6942172897196262e-05,
1395
- "loss": 5.6089,
1396
- "step": 1980
1397
- },
1398
- {
1399
- "epoch": 4.163179916317992,
1400
- "grad_norm": 6.158806800842285,
1401
- "learning_rate": 2.679614485981308e-05,
1402
- "loss": 5.6393,
1403
- "step": 1990
1404
- },
1405
- {
1406
- "epoch": 4.184100418410042,
1407
- "grad_norm": 6.397863864898682,
1408
- "learning_rate": 2.665011682242991e-05,
1409
- "loss": 5.6295,
1410
- "step": 2000
1411
- },
1412
- {
1413
- "epoch": 4.205020920502092,
1414
- "grad_norm": 6.268986225128174,
1415
- "learning_rate": 2.6504088785046734e-05,
1416
- "loss": 5.631,
1417
- "step": 2010
1418
- },
1419
- {
1420
- "epoch": 4.2259414225941425,
1421
- "grad_norm": 5.931266784667969,
1422
- "learning_rate": 2.6358060747663553e-05,
1423
- "loss": 5.6246,
1424
- "step": 2020
1425
- },
1426
- {
1427
- "epoch": 4.2468619246861925,
1428
- "grad_norm": 6.028716087341309,
1429
- "learning_rate": 2.6212032710280376e-05,
1430
- "loss": 5.6398,
1431
- "step": 2030
1432
- },
1433
- {
1434
- "epoch": 4.2677824267782425,
1435
- "grad_norm": 5.64882755279541,
1436
- "learning_rate": 2.60660046728972e-05,
1437
- "loss": 5.6255,
1438
- "step": 2040
1439
- },
1440
- {
1441
- "epoch": 4.2887029288702925,
1442
- "grad_norm": 5.4745635986328125,
1443
- "learning_rate": 2.591997663551402e-05,
1444
- "loss": 5.6545,
1445
- "step": 2050
1446
- },
1447
- {
1448
- "epoch": 4.309623430962343,
1449
- "grad_norm": 5.935332298278809,
1450
- "learning_rate": 2.5773948598130844e-05,
1451
- "loss": 5.6088,
1452
- "step": 2060
1453
- },
1454
- {
1455
- "epoch": 4.330543933054393,
1456
- "grad_norm": 5.224592685699463,
1457
- "learning_rate": 2.5627920560747663e-05,
1458
- "loss": 5.5845,
1459
- "step": 2070
1460
- },
1461
- {
1462
- "epoch": 4.351464435146443,
1463
- "grad_norm": 5.875316143035889,
1464
- "learning_rate": 2.5481892523364486e-05,
1465
- "loss": 5.6726,
1466
- "step": 2080
1467
- },
1468
- {
1469
- "epoch": 4.372384937238493,
1470
- "grad_norm": 6.300271987915039,
1471
- "learning_rate": 2.533586448598131e-05,
1472
- "loss": 5.6344,
1473
- "step": 2090
1474
- },
1475
- {
1476
- "epoch": 4.393305439330544,
1477
- "grad_norm": 5.322109222412109,
1478
- "learning_rate": 2.518983644859813e-05,
1479
- "loss": 5.6661,
1480
- "step": 2100
1481
- },
1482
- {
1483
- "epoch": 4.414225941422594,
1484
- "grad_norm": 6.655251979827881,
1485
- "learning_rate": 2.5043808411214954e-05,
1486
- "loss": 5.6228,
1487
- "step": 2110
1488
- },
1489
- {
1490
- "epoch": 4.435146443514644,
1491
- "grad_norm": 6.288819313049316,
1492
- "learning_rate": 2.4897780373831777e-05,
1493
- "loss": 5.6608,
1494
- "step": 2120
1495
- },
1496
- {
1497
- "epoch": 4.456066945606695,
1498
- "grad_norm": 6.775393486022949,
1499
- "learning_rate": 2.47517523364486e-05,
1500
- "loss": 5.6565,
1501
- "step": 2130
1502
- },
1503
- {
1504
- "epoch": 4.476987447698745,
1505
- "grad_norm": 6.130661487579346,
1506
- "learning_rate": 2.4605724299065422e-05,
1507
- "loss": 5.7261,
1508
- "step": 2140
1509
- },
1510
- {
1511
- "epoch": 4.497907949790795,
1512
- "grad_norm": 5.6841864585876465,
1513
- "learning_rate": 2.4459696261682245e-05,
1514
- "loss": 5.6311,
1515
- "step": 2150
1516
- },
1517
- {
1518
- "epoch": 4.518828451882845,
1519
- "grad_norm": 5.966508865356445,
1520
- "learning_rate": 2.4313668224299068e-05,
1521
- "loss": 5.6297,
1522
- "step": 2160
1523
- },
1524
- {
1525
- "epoch": 4.539748953974895,
1526
- "grad_norm": 6.141171932220459,
1527
- "learning_rate": 2.4167640186915887e-05,
1528
- "loss": 5.5967,
1529
- "step": 2170
1530
- },
1531
- {
1532
- "epoch": 4.560669456066946,
1533
- "grad_norm": 5.987954616546631,
1534
- "learning_rate": 2.402161214953271e-05,
1535
- "loss": 5.6456,
1536
- "step": 2180
1537
- },
1538
- {
1539
- "epoch": 4.581589958158996,
1540
- "grad_norm": 5.768298149108887,
1541
- "learning_rate": 2.3875584112149536e-05,
1542
- "loss": 5.5983,
1543
- "step": 2190
1544
- },
1545
- {
1546
- "epoch": 4.602510460251046,
1547
- "grad_norm": 6.168797492980957,
1548
- "learning_rate": 2.372955607476636e-05,
1549
- "loss": 5.6419,
1550
- "step": 2200
1551
- },
1552
- {
1553
- "epoch": 4.623430962343097,
1554
- "grad_norm": 6.4928460121154785,
1555
- "learning_rate": 2.3583528037383178e-05,
1556
- "loss": 5.5798,
1557
- "step": 2210
1558
- },
1559
- {
1560
- "epoch": 4.644351464435147,
1561
- "grad_norm": 5.2203521728515625,
1562
- "learning_rate": 2.34375e-05,
1563
- "loss": 5.6485,
1564
- "step": 2220
1565
- },
1566
- {
1567
- "epoch": 4.665271966527197,
1568
- "grad_norm": 6.13705587387085,
1569
- "learning_rate": 2.3291471962616823e-05,
1570
- "loss": 5.6366,
1571
- "step": 2230
1572
- },
1573
- {
1574
- "epoch": 4.686192468619247,
1575
- "grad_norm": 5.693001747131348,
1576
- "learning_rate": 2.3145443925233646e-05,
1577
- "loss": 5.5801,
1578
- "step": 2240
1579
- },
1580
- {
1581
- "epoch": 4.707112970711297,
1582
- "grad_norm": 5.713005542755127,
1583
- "learning_rate": 2.299941588785047e-05,
1584
- "loss": 5.5998,
1585
- "step": 2250
1586
- },
1587
- {
1588
- "epoch": 4.7280334728033475,
1589
- "grad_norm": 7.082109451293945,
1590
- "learning_rate": 2.285338785046729e-05,
1591
- "loss": 5.6667,
1592
- "step": 2260
1593
- },
1594
- {
1595
- "epoch": 4.7489539748953975,
1596
- "grad_norm": 5.609521389007568,
1597
- "learning_rate": 2.2707359813084114e-05,
1598
- "loss": 5.6409,
1599
- "step": 2270
1600
- },
1601
- {
1602
- "epoch": 4.7698744769874475,
1603
- "grad_norm": 7.057249546051025,
1604
- "learning_rate": 2.2561331775700937e-05,
1605
- "loss": 5.6367,
1606
- "step": 2280
1607
- },
1608
- {
1609
- "epoch": 4.790794979079498,
1610
- "grad_norm": 5.788174152374268,
1611
- "learning_rate": 2.241530373831776e-05,
1612
- "loss": 5.6282,
1613
- "step": 2290
1614
- },
1615
- {
1616
- "epoch": 4.811715481171548,
1617
- "grad_norm": 7.124642372131348,
1618
- "learning_rate": 2.226927570093458e-05,
1619
- "loss": 5.6511,
1620
- "step": 2300
1621
- },
1622
- {
1623
- "epoch": 4.832635983263598,
1624
- "grad_norm": 5.990348815917969,
1625
- "learning_rate": 2.21232476635514e-05,
1626
- "loss": 5.6328,
1627
- "step": 2310
1628
- },
1629
- {
1630
- "epoch": 4.853556485355648,
1631
- "grad_norm": 5.4413604736328125,
1632
- "learning_rate": 2.1977219626168224e-05,
1633
- "loss": 5.6169,
1634
- "step": 2320
1635
- },
1636
- {
1637
- "epoch": 4.874476987447698,
1638
- "grad_norm": 5.1628875732421875,
1639
- "learning_rate": 2.1831191588785047e-05,
1640
- "loss": 5.6147,
1641
- "step": 2330
1642
- },
1643
- {
1644
- "epoch": 4.895397489539749,
1645
- "grad_norm": 5.829679489135742,
1646
- "learning_rate": 2.1685163551401873e-05,
1647
- "loss": 5.5966,
1648
- "step": 2340
1649
- },
1650
- {
1651
- "epoch": 4.916317991631799,
1652
- "grad_norm": 5.632717132568359,
1653
- "learning_rate": 2.1539135514018692e-05,
1654
- "loss": 5.6365,
1655
- "step": 2350
1656
- },
1657
- {
1658
- "epoch": 4.937238493723849,
1659
- "grad_norm": 5.266159534454346,
1660
- "learning_rate": 2.1393107476635515e-05,
1661
- "loss": 5.615,
1662
- "step": 2360
1663
- },
1664
- {
1665
- "epoch": 4.9581589958159,
1666
- "grad_norm": 6.379525661468506,
1667
- "learning_rate": 2.1247079439252338e-05,
1668
- "loss": 5.6182,
1669
- "step": 2370
1670
- },
1671
- {
1672
- "epoch": 4.97907949790795,
1673
- "grad_norm": 6.879369258880615,
1674
- "learning_rate": 2.110105140186916e-05,
1675
- "loss": 5.6051,
1676
- "step": 2380
1677
- },
1678
- {
1679
- "epoch": 5.0,
1680
- "grad_norm": 5.6393723487854,
1681
- "learning_rate": 2.0955023364485983e-05,
1682
- "loss": 5.5976,
1683
- "step": 2390
1684
- },
1685
- {
1686
- "epoch": 5.02092050209205,
1687
- "grad_norm": 4.893331050872803,
1688
- "learning_rate": 2.0808995327102803e-05,
1689
- "loss": 5.4576,
1690
- "step": 2400
1691
- },
1692
- {
1693
- "epoch": 5.0418410041841,
1694
- "grad_norm": 5.28970193862915,
1695
- "learning_rate": 2.0662967289719625e-05,
1696
- "loss": 5.4601,
1697
- "step": 2410
1698
- },
1699
- {
1700
- "epoch": 5.062761506276151,
1701
- "grad_norm": 4.617730140686035,
1702
- "learning_rate": 2.051693925233645e-05,
1703
- "loss": 5.4666,
1704
- "step": 2420
1705
- },
1706
- {
1707
- "epoch": 5.083682008368201,
1708
- "grad_norm": 5.753638744354248,
1709
- "learning_rate": 2.0370911214953274e-05,
1710
- "loss": 5.4661,
1711
- "step": 2430
1712
- },
1713
- {
1714
- "epoch": 5.104602510460251,
1715
- "grad_norm": 5.47844123840332,
1716
- "learning_rate": 2.0224883177570093e-05,
1717
- "loss": 5.4668,
1718
- "step": 2440
1719
- },
1720
- {
1721
- "epoch": 5.125523012552302,
1722
- "grad_norm": 5.340426921844482,
1723
- "learning_rate": 2.0078855140186916e-05,
1724
- "loss": 5.4427,
1725
- "step": 2450
1726
- },
1727
- {
1728
- "epoch": 5.146443514644352,
1729
- "grad_norm": 5.01825475692749,
1730
- "learning_rate": 1.993282710280374e-05,
1731
- "loss": 5.4998,
1732
- "step": 2460
1733
- },
1734
- {
1735
- "epoch": 5.167364016736402,
1736
- "grad_norm": 4.745482444763184,
1737
- "learning_rate": 1.978679906542056e-05,
1738
- "loss": 5.4902,
1739
- "step": 2470
1740
- },
1741
- {
1742
- "epoch": 5.188284518828452,
1743
- "grad_norm": 6.2382588386535645,
1744
- "learning_rate": 1.9640771028037384e-05,
1745
- "loss": 5.5755,
1746
- "step": 2480
1747
- },
1748
- {
1749
- "epoch": 5.209205020920502,
1750
- "grad_norm": 5.502182483673096,
1751
- "learning_rate": 1.9494742990654207e-05,
1752
- "loss": 5.5131,
1753
- "step": 2490
1754
- },
1755
- {
1756
- "epoch": 5.2301255230125525,
1757
- "grad_norm": 4.979122638702393,
1758
- "learning_rate": 1.934871495327103e-05,
1759
- "loss": 5.5219,
1760
- "step": 2500
1761
- },
1762
- {
1763
- "epoch": 5.2510460251046025,
1764
- "grad_norm": 5.3337082862854,
1765
- "learning_rate": 1.9202686915887852e-05,
1766
- "loss": 5.5234,
1767
- "step": 2510
1768
- },
1769
- {
1770
- "epoch": 5.2719665271966525,
1771
- "grad_norm": 5.246830463409424,
1772
- "learning_rate": 1.9056658878504675e-05,
1773
- "loss": 5.493,
1774
- "step": 2520
1775
- },
1776
- {
1777
- "epoch": 5.292887029288703,
1778
- "grad_norm": 5.157444477081299,
1779
- "learning_rate": 1.8910630841121494e-05,
1780
- "loss": 5.4945,
1781
- "step": 2530
1782
- },
1783
- {
1784
- "epoch": 5.313807531380753,
1785
- "grad_norm": 4.96690034866333,
1786
- "learning_rate": 1.8764602803738317e-05,
1787
- "loss": 5.4813,
1788
- "step": 2540
1789
- },
1790
- {
1791
- "epoch": 5.334728033472803,
1792
- "grad_norm": 5.21718168258667,
1793
- "learning_rate": 1.861857476635514e-05,
1794
- "loss": 5.4243,
1795
- "step": 2550
1796
- },
1797
- {
1798
- "epoch": 5.355648535564853,
1799
- "grad_norm": 5.486342430114746,
1800
- "learning_rate": 1.8472546728971963e-05,
1801
- "loss": 5.8887,
1802
- "step": 2560
1803
- },
1804
- {
1805
- "epoch": 5.376569037656903,
1806
- "grad_norm": 5.079704284667969,
1807
- "learning_rate": 1.832651869158879e-05,
1808
- "loss": 5.4686,
1809
- "step": 2570
1810
- },
1811
- {
1812
- "epoch": 5.397489539748954,
1813
- "grad_norm": 6.411098957061768,
1814
- "learning_rate": 1.8180490654205608e-05,
1815
- "loss": 5.4585,
1816
- "step": 2580
1817
- },
1818
- {
1819
- "epoch": 5.418410041841004,
1820
- "grad_norm": 5.305148124694824,
1821
- "learning_rate": 1.803446261682243e-05,
1822
- "loss": 5.4659,
1823
- "step": 2590
1824
- },
1825
- {
1826
- "epoch": 5.439330543933054,
1827
- "grad_norm": 4.71307897567749,
1828
- "learning_rate": 1.7888434579439253e-05,
1829
- "loss": 5.476,
1830
- "step": 2600
1831
- },
1832
- {
1833
- "epoch": 5.460251046025105,
1834
- "grad_norm": 4.948145866394043,
1835
- "learning_rate": 1.7742406542056076e-05,
1836
- "loss": 5.4835,
1837
- "step": 2610
1838
- },
1839
- {
1840
- "epoch": 5.481171548117155,
1841
- "grad_norm": 5.343236446380615,
1842
- "learning_rate": 1.75963785046729e-05,
1843
- "loss": 5.4553,
1844
- "step": 2620
1845
- },
1846
- {
1847
- "epoch": 5.502092050209205,
1848
- "grad_norm": 5.69612979888916,
1849
- "learning_rate": 1.7450350467289718e-05,
1850
- "loss": 5.4989,
1851
- "step": 2630
1852
- },
1853
- {
1854
- "epoch": 5.523012552301255,
1855
- "grad_norm": 6.676581382751465,
1856
- "learning_rate": 1.730432242990654e-05,
1857
- "loss": 5.4625,
1858
- "step": 2640
1859
- },
1860
- {
1861
- "epoch": 5.543933054393305,
1862
- "grad_norm": 5.195738315582275,
1863
- "learning_rate": 1.7158294392523367e-05,
1864
- "loss": 5.4589,
1865
- "step": 2650
1866
- },
1867
- {
1868
- "epoch": 5.564853556485356,
1869
- "grad_norm": 4.7489190101623535,
1870
- "learning_rate": 1.701226635514019e-05,
1871
- "loss": 5.4564,
1872
- "step": 2660
1873
- },
1874
- {
1875
- "epoch": 5.585774058577406,
1876
- "grad_norm": 6.458863735198975,
1877
- "learning_rate": 1.686623831775701e-05,
1878
- "loss": 5.4976,
1879
- "step": 2670
1880
- },
1881
- {
1882
- "epoch": 5.606694560669456,
1883
- "grad_norm": 4.868573188781738,
1884
- "learning_rate": 1.672021028037383e-05,
1885
- "loss": 5.5464,
1886
- "step": 2680
1887
- },
1888
- {
1889
- "epoch": 5.627615062761507,
1890
- "grad_norm": 5.2593674659729,
1891
- "learning_rate": 1.6574182242990654e-05,
1892
- "loss": 5.5095,
1893
- "step": 2690
1894
- },
1895
- {
1896
- "epoch": 5.648535564853557,
1897
- "grad_norm": 5.476712226867676,
1898
- "learning_rate": 1.6428154205607477e-05,
1899
- "loss": 5.4386,
1900
- "step": 2700
1901
- },
1902
- {
1903
- "epoch": 5.669456066945607,
1904
- "grad_norm": 5.268965244293213,
1905
- "learning_rate": 1.62821261682243e-05,
1906
- "loss": 5.4822,
1907
- "step": 2710
1908
- },
1909
- {
1910
- "epoch": 5.690376569037657,
1911
- "grad_norm": 4.747652053833008,
1912
- "learning_rate": 1.6136098130841123e-05,
1913
- "loss": 5.4622,
1914
- "step": 2720
1915
- },
1916
- {
1917
- "epoch": 5.711297071129707,
1918
- "grad_norm": 5.430568695068359,
1919
- "learning_rate": 1.5990070093457945e-05,
1920
- "loss": 5.4866,
1921
- "step": 2730
1922
- },
1923
- {
1924
- "epoch": 5.7322175732217575,
1925
- "grad_norm": 4.823955535888672,
1926
- "learning_rate": 1.5844042056074768e-05,
1927
- "loss": 5.4663,
1928
- "step": 2740
1929
- },
1930
- {
1931
- "epoch": 5.7531380753138075,
1932
- "grad_norm": 5.330785751342773,
1933
- "learning_rate": 1.569801401869159e-05,
1934
- "loss": 5.4269,
1935
- "step": 2750
1936
- },
1937
- {
1938
- "epoch": 5.7740585774058575,
1939
- "grad_norm": 4.768071174621582,
1940
- "learning_rate": 1.5551985981308413e-05,
1941
- "loss": 5.4914,
1942
- "step": 2760
1943
- },
1944
- {
1945
- "epoch": 5.794979079497908,
1946
- "grad_norm": 4.776971817016602,
1947
- "learning_rate": 1.5405957943925233e-05,
1948
- "loss": 5.5026,
1949
- "step": 2770
1950
- },
1951
- {
1952
- "epoch": 5.815899581589958,
1953
- "grad_norm": 4.9256463050842285,
1954
- "learning_rate": 1.5259929906542055e-05,
1955
- "loss": 5.4678,
1956
- "step": 2780
1957
- },
1958
- {
1959
- "epoch": 5.836820083682008,
1960
- "grad_norm": 5.471677780151367,
1961
- "learning_rate": 1.5113901869158878e-05,
1962
- "loss": 5.4513,
1963
- "step": 2790
1964
- },
1965
- {
1966
- "epoch": 5.857740585774058,
1967
- "grad_norm": 5.3272318840026855,
1968
- "learning_rate": 1.4967873831775703e-05,
1969
- "loss": 5.4602,
1970
- "step": 2800
1971
- },
1972
- {
1973
- "epoch": 5.878661087866108,
1974
- "grad_norm": 6.088062763214111,
1975
- "learning_rate": 1.4821845794392525e-05,
1976
- "loss": 5.4508,
1977
- "step": 2810
1978
- },
1979
- {
1980
- "epoch": 5.899581589958159,
1981
- "grad_norm": 5.610989570617676,
1982
- "learning_rate": 1.4675817757009346e-05,
1983
- "loss": 5.4853,
1984
- "step": 2820
1985
- },
1986
- {
1987
- "epoch": 5.920502092050209,
1988
- "grad_norm": 5.627800464630127,
1989
- "learning_rate": 1.4529789719626169e-05,
1990
- "loss": 5.4534,
1991
- "step": 2830
1992
- },
1993
- {
1994
- "epoch": 5.941422594142259,
1995
- "grad_norm": 4.9768242835998535,
1996
- "learning_rate": 1.4383761682242992e-05,
1997
- "loss": 5.5166,
1998
- "step": 2840
1999
- },
2000
- {
2001
- "epoch": 5.96234309623431,
2002
- "grad_norm": 5.74553918838501,
2003
- "learning_rate": 1.4237733644859813e-05,
2004
- "loss": 5.4795,
2005
- "step": 2850
2006
- },
2007
- {
2008
- "epoch": 5.98326359832636,
2009
- "grad_norm": 5.134048938751221,
2010
- "learning_rate": 1.4091705607476635e-05,
2011
- "loss": 5.4516,
2012
- "step": 2860
2013
- },
2014
- {
2015
- "epoch": 6.00418410041841,
2016
- "grad_norm": 4.69456148147583,
2017
- "learning_rate": 1.394567757009346e-05,
2018
- "loss": 5.4478,
2019
- "step": 2870
2020
- },
2021
- {
2022
- "epoch": 6.02510460251046,
2023
- "grad_norm": 4.978532791137695,
2024
- "learning_rate": 1.3799649532710283e-05,
2025
- "loss": 5.4058,
2026
- "step": 2880
2027
- },
2028
- {
2029
- "epoch": 6.046025104602511,
2030
- "grad_norm": 4.94218111038208,
2031
- "learning_rate": 1.3653621495327104e-05,
2032
- "loss": 5.3842,
2033
- "step": 2890
2034
- },
2035
- {
2036
- "epoch": 6.066945606694561,
2037
- "grad_norm": 4.698522090911865,
2038
- "learning_rate": 1.3507593457943926e-05,
2039
- "loss": 5.3858,
2040
- "step": 2900
2041
- },
2042
- {
2043
- "epoch": 6.087866108786611,
2044
- "grad_norm": 4.722201347351074,
2045
- "learning_rate": 1.3361565420560749e-05,
2046
- "loss": 5.3834,
2047
- "step": 2910
2048
- },
2049
- {
2050
- "epoch": 6.108786610878661,
2051
- "grad_norm": 4.465900897979736,
2052
- "learning_rate": 1.321553738317757e-05,
2053
- "loss": 5.3465,
2054
- "step": 2920
2055
- },
2056
- {
2057
- "epoch": 6.129707112970712,
2058
- "grad_norm": 4.588057518005371,
2059
- "learning_rate": 1.3069509345794393e-05,
2060
- "loss": 5.4118,
2061
- "step": 2930
2062
- },
2063
- {
2064
- "epoch": 6.150627615062762,
2065
- "grad_norm": 4.46038293838501,
2066
- "learning_rate": 1.2923481308411214e-05,
2067
- "loss": 5.3614,
2068
- "step": 2940
2069
- },
2070
- {
2071
- "epoch": 6.171548117154812,
2072
- "grad_norm": 4.710984230041504,
2073
- "learning_rate": 1.277745327102804e-05,
2074
- "loss": 5.328,
2075
- "step": 2950
2076
- },
2077
- {
2078
- "epoch": 6.192468619246862,
2079
- "grad_norm": 4.511489391326904,
2080
- "learning_rate": 1.263142523364486e-05,
2081
- "loss": 5.38,
2082
- "step": 2960
2083
- },
2084
- {
2085
- "epoch": 6.2133891213389125,
2086
- "grad_norm": 4.490772724151611,
2087
- "learning_rate": 1.2485397196261684e-05,
2088
- "loss": 5.3674,
2089
- "step": 2970
2090
- },
2091
- {
2092
- "epoch": 6.2343096234309625,
2093
- "grad_norm": 5.166575908660889,
2094
- "learning_rate": 1.2339369158878506e-05,
2095
- "loss": 5.3945,
2096
- "step": 2980
2097
- },
2098
- {
2099
- "epoch": 6.2552301255230125,
2100
- "grad_norm": 4.254697322845459,
2101
- "learning_rate": 1.2193341121495327e-05,
2102
- "loss": 5.3574,
2103
- "step": 2990
2104
- },
2105
- {
2106
- "epoch": 6.2761506276150625,
2107
- "grad_norm": 4.542943477630615,
2108
- "learning_rate": 1.204731308411215e-05,
2109
- "loss": 5.3809,
2110
- "step": 3000
2111
- },
2112
- {
2113
- "epoch": 6.297071129707113,
2114
- "grad_norm": 4.283222198486328,
2115
- "learning_rate": 1.1901285046728973e-05,
2116
- "loss": 5.3715,
2117
- "step": 3010
2118
- },
2119
- {
2120
- "epoch": 6.317991631799163,
2121
- "grad_norm": 4.2042155265808105,
2122
- "learning_rate": 1.1755257009345795e-05,
2123
- "loss": 5.3525,
2124
- "step": 3020
2125
- },
2126
- {
2127
- "epoch": 6.338912133891213,
2128
- "grad_norm": 5.394988536834717,
2129
- "learning_rate": 1.1609228971962616e-05,
2130
- "loss": 5.3997,
2131
- "step": 3030
2132
- },
2133
- {
2134
- "epoch": 6.359832635983263,
2135
- "grad_norm": 4.921761512756348,
2136
- "learning_rate": 1.1463200934579439e-05,
2137
- "loss": 5.3822,
2138
- "step": 3040
2139
- },
2140
- {
2141
- "epoch": 6.380753138075314,
2142
- "grad_norm": 4.8779449462890625,
2143
- "learning_rate": 1.1317172897196262e-05,
2144
- "loss": 5.3993,
2145
- "step": 3050
2146
- },
2147
- {
2148
- "epoch": 6.401673640167364,
2149
- "grad_norm": 5.6628875732421875,
2150
- "learning_rate": 1.1171144859813085e-05,
2151
- "loss": 5.4184,
2152
- "step": 3060
2153
- },
2154
- {
2155
- "epoch": 6.422594142259414,
2156
- "grad_norm": 5.005777359008789,
2157
- "learning_rate": 1.1025116822429907e-05,
2158
- "loss": 5.3914,
2159
- "step": 3070
2160
- },
2161
- {
2162
- "epoch": 6.443514644351464,
2163
- "grad_norm": 5.296278476715088,
2164
- "learning_rate": 1.087908878504673e-05,
2165
- "loss": 5.3814,
2166
- "step": 3080
2167
- },
2168
- {
2169
- "epoch": 6.464435146443515,
2170
- "grad_norm": 4.869510173797607,
2171
- "learning_rate": 1.0733060747663553e-05,
2172
- "loss": 5.3585,
2173
- "step": 3090
2174
- },
2175
- {
2176
- "epoch": 6.485355648535565,
2177
- "grad_norm": 5.106717109680176,
2178
- "learning_rate": 1.0587032710280374e-05,
2179
- "loss": 5.3638,
2180
- "step": 3100
2181
- },
2182
- {
2183
- "epoch": 6.506276150627615,
2184
- "grad_norm": 11.799344062805176,
2185
- "learning_rate": 1.0441004672897196e-05,
2186
- "loss": 5.3664,
2187
- "step": 3110
2188
- },
2189
- {
2190
- "epoch": 6.527196652719665,
2191
- "grad_norm": 4.456765174865723,
2192
- "learning_rate": 1.0294976635514019e-05,
2193
- "loss": 5.3867,
2194
- "step": 3120
2195
- },
2196
- {
2197
- "epoch": 6.548117154811716,
2198
- "grad_norm": 4.739964008331299,
2199
- "learning_rate": 1.0148948598130842e-05,
2200
- "loss": 5.3711,
2201
- "step": 3130
2202
- },
2203
- {
2204
- "epoch": 6.569037656903766,
2205
- "grad_norm": 5.056917667388916,
2206
- "learning_rate": 1.0002920560747665e-05,
2207
- "loss": 5.393,
2208
- "step": 3140
2209
- },
2210
- {
2211
- "epoch": 6.589958158995816,
2212
- "grad_norm": 3.9486734867095947,
2213
- "learning_rate": 9.856892523364486e-06,
2214
- "loss": 5.3844,
2215
- "step": 3150
2216
- },
2217
- {
2218
- "epoch": 6.610878661087866,
2219
- "grad_norm": 4.636373043060303,
2220
- "learning_rate": 9.71086448598131e-06,
2221
- "loss": 5.3648,
2222
- "step": 3160
2223
- },
2224
- {
2225
- "epoch": 6.631799163179917,
2226
- "grad_norm": 5.06486701965332,
2227
- "learning_rate": 9.564836448598131e-06,
2228
- "loss": 5.3781,
2229
- "step": 3170
2230
- },
2231
- {
2232
- "epoch": 6.652719665271967,
2233
- "grad_norm": 4.988446235656738,
2234
- "learning_rate": 9.418808411214954e-06,
2235
- "loss": 5.3977,
2236
- "step": 3180
2237
- },
2238
- {
2239
- "epoch": 6.673640167364017,
2240
- "grad_norm": 5.134821891784668,
2241
- "learning_rate": 9.272780373831776e-06,
2242
- "loss": 5.3853,
2243
- "step": 3190
2244
- },
2245
- {
2246
- "epoch": 6.694560669456067,
2247
- "grad_norm": 4.971440315246582,
2248
- "learning_rate": 9.126752336448599e-06,
2249
- "loss": 5.4187,
2250
- "step": 3200
2251
- },
2252
- {
2253
- "epoch": 6.7154811715481175,
2254
- "grad_norm": 5.063161373138428,
2255
- "learning_rate": 8.980724299065422e-06,
2256
- "loss": 5.3687,
2257
- "step": 3210
2258
- },
2259
- {
2260
- "epoch": 6.7364016736401675,
2261
- "grad_norm": 4.764766216278076,
2262
- "learning_rate": 8.834696261682243e-06,
2263
- "loss": 5.3962,
2264
- "step": 3220
2265
- },
2266
- {
2267
- "epoch": 6.7573221757322175,
2268
- "grad_norm": 4.914244651794434,
2269
- "learning_rate": 8.688668224299066e-06,
2270
- "loss": 5.3916,
2271
- "step": 3230
2272
- },
2273
- {
2274
- "epoch": 6.7782426778242675,
2275
- "grad_norm": 5.3414387702941895,
2276
- "learning_rate": 8.542640186915888e-06,
2277
- "loss": 5.3517,
2278
- "step": 3240
2279
- },
2280
- {
2281
- "epoch": 6.799163179916318,
2282
- "grad_norm": 4.932905673980713,
2283
- "learning_rate": 8.396612149532711e-06,
2284
- "loss": 5.395,
2285
- "step": 3250
2286
- },
2287
- {
2288
- "epoch": 6.820083682008368,
2289
- "grad_norm": 4.897671699523926,
2290
- "learning_rate": 8.250584112149532e-06,
2291
- "loss": 5.4139,
2292
- "step": 3260
2293
- },
2294
- {
2295
- "epoch": 6.841004184100418,
2296
- "grad_norm": 4.509671211242676,
2297
- "learning_rate": 8.104556074766355e-06,
2298
- "loss": 5.3595,
2299
- "step": 3270
2300
- },
2301
- {
2302
- "epoch": 6.861924686192468,
2303
- "grad_norm": 5.010800838470459,
2304
- "learning_rate": 7.958528037383179e-06,
2305
- "loss": 5.4167,
2306
- "step": 3280
2307
- },
2308
- {
2309
- "epoch": 6.882845188284519,
2310
- "grad_norm": 4.797663688659668,
2311
- "learning_rate": 7.8125e-06,
2312
- "loss": 5.3714,
2313
- "step": 3290
2314
- },
2315
- {
2316
- "epoch": 6.903765690376569,
2317
- "grad_norm": 4.6787543296813965,
2318
- "learning_rate": 7.666471962616823e-06,
2319
- "loss": 5.362,
2320
- "step": 3300
2321
- },
2322
- {
2323
- "epoch": 6.924686192468619,
2324
- "grad_norm": 5.634804725646973,
2325
- "learning_rate": 7.520443925233646e-06,
2326
- "loss": 5.3973,
2327
- "step": 3310
2328
- },
2329
- {
2330
- "epoch": 6.945606694560669,
2331
- "grad_norm": 4.700837135314941,
2332
- "learning_rate": 7.374415887850468e-06,
2333
- "loss": 5.3814,
2334
- "step": 3320
2335
- },
2336
- {
2337
- "epoch": 6.96652719665272,
2338
- "grad_norm": 4.776175498962402,
2339
- "learning_rate": 7.22838785046729e-06,
2340
- "loss": 5.4619,
2341
- "step": 3330
2342
- },
2343
- {
2344
- "epoch": 6.98744769874477,
2345
- "grad_norm": 4.83176851272583,
2346
- "learning_rate": 7.082359813084112e-06,
2347
- "loss": 5.367,
2348
- "step": 3340
2349
- },
2350
- {
2351
- "epoch": 7.00836820083682,
2352
- "grad_norm": 4.134293556213379,
2353
- "learning_rate": 6.936331775700936e-06,
2354
- "loss": 5.36,
2355
- "step": 3350
2356
- },
2357
- {
2358
- "epoch": 7.02928870292887,
2359
- "grad_norm": 4.107188701629639,
2360
- "learning_rate": 6.7903037383177575e-06,
2361
- "loss": 5.3474,
2362
- "step": 3360
2363
- },
2364
- {
2365
- "epoch": 7.050209205020921,
2366
- "grad_norm": 4.398261070251465,
2367
- "learning_rate": 6.644275700934579e-06,
2368
- "loss": 5.3765,
2369
- "step": 3370
2370
- },
2371
- {
2372
- "epoch": 7.071129707112971,
2373
- "grad_norm": 4.313539028167725,
2374
- "learning_rate": 6.498247663551402e-06,
2375
- "loss": 5.3409,
2376
- "step": 3380
2377
- },
2378
- {
2379
- "epoch": 7.092050209205021,
2380
- "grad_norm": 4.105713844299316,
2381
- "learning_rate": 6.352219626168225e-06,
2382
- "loss": 5.3471,
2383
- "step": 3390
2384
- },
2385
- {
2386
- "epoch": 7.112970711297071,
2387
- "grad_norm": 5.01039457321167,
2388
- "learning_rate": 6.2061915887850475e-06,
2389
- "loss": 5.3206,
2390
- "step": 3400
2391
- },
2392
- {
2393
- "epoch": 7.133891213389122,
2394
- "grad_norm": 4.230257034301758,
2395
- "learning_rate": 6.060163551401869e-06,
2396
- "loss": 5.3366,
2397
- "step": 3410
2398
- },
2399
- {
2400
- "epoch": 7.154811715481172,
2401
- "grad_norm": 4.975685119628906,
2402
- "learning_rate": 5.914135514018692e-06,
2403
- "loss": 5.3402,
2404
- "step": 3420
2405
- },
2406
- {
2407
- "epoch": 7.175732217573222,
2408
- "grad_norm": 4.542948246002197,
2409
- "learning_rate": 5.768107476635514e-06,
2410
- "loss": 5.3499,
2411
- "step": 3430
2412
- },
2413
- {
2414
- "epoch": 7.196652719665272,
2415
- "grad_norm": 4.824731826782227,
2416
- "learning_rate": 5.622079439252337e-06,
2417
- "loss": 5.3365,
2418
- "step": 3440
2419
- },
2420
- {
2421
- "epoch": 7.2175732217573225,
2422
- "grad_norm": 4.396641254425049,
2423
- "learning_rate": 5.4760514018691585e-06,
2424
- "loss": 5.2971,
2425
- "step": 3450
2426
- },
2427
- {
2428
- "epoch": 7.2384937238493725,
2429
- "grad_norm": 4.402401447296143,
2430
- "learning_rate": 5.330023364485982e-06,
2431
- "loss": 5.3066,
2432
- "step": 3460
2433
- },
2434
- {
2435
- "epoch": 7.2594142259414225,
2436
- "grad_norm": 4.199159622192383,
2437
- "learning_rate": 5.183995327102804e-06,
2438
- "loss": 5.3325,
2439
- "step": 3470
2440
- },
2441
- {
2442
- "epoch": 7.2803347280334725,
2443
- "grad_norm": 4.842240333557129,
2444
- "learning_rate": 5.037967289719627e-06,
2445
- "loss": 5.3296,
2446
- "step": 3480
2447
- },
2448
- {
2449
- "epoch": 7.301255230125523,
2450
- "grad_norm": 4.354606628417969,
2451
- "learning_rate": 4.8919392523364485e-06,
2452
- "loss": 5.3383,
2453
- "step": 3490
2454
- },
2455
- {
2456
- "epoch": 7.322175732217573,
2457
- "grad_norm": 5.421429634094238,
2458
- "learning_rate": 4.745911214953271e-06,
2459
- "loss": 5.3626,
2460
- "step": 3500
2461
- },
2462
- {
2463
- "epoch": 7.343096234309623,
2464
- "grad_norm": 4.057744026184082,
2465
- "learning_rate": 4.599883177570094e-06,
2466
- "loss": 5.3314,
2467
- "step": 3510
2468
- },
2469
- {
2470
- "epoch": 7.364016736401673,
2471
- "grad_norm": 4.317595481872559,
2472
- "learning_rate": 4.453855140186916e-06,
2473
- "loss": 5.3421,
2474
- "step": 3520
2475
- },
2476
- {
2477
- "epoch": 7.384937238493724,
2478
- "grad_norm": 4.638053894042969,
2479
- "learning_rate": 4.3078271028037385e-06,
2480
- "loss": 5.3216,
2481
- "step": 3530
2482
- },
2483
- {
2484
- "epoch": 7.405857740585774,
2485
- "grad_norm": 4.188862323760986,
2486
- "learning_rate": 4.161799065420561e-06,
2487
- "loss": 5.2974,
2488
- "step": 3540
2489
- },
2490
- {
2491
- "epoch": 7.426778242677824,
2492
- "grad_norm": 4.47027587890625,
2493
- "learning_rate": 4.015771028037384e-06,
2494
- "loss": 5.3287,
2495
- "step": 3550
2496
- },
2497
- {
2498
- "epoch": 7.447698744769874,
2499
- "grad_norm": 3.957211971282959,
2500
- "learning_rate": 3.869742990654206e-06,
2501
- "loss": 5.3148,
2502
- "step": 3560
2503
- },
2504
- {
2505
- "epoch": 7.468619246861925,
2506
- "grad_norm": 4.483851432800293,
2507
- "learning_rate": 3.7237149532710285e-06,
2508
- "loss": 5.3509,
2509
- "step": 3570
2510
- },
2511
- {
2512
- "epoch": 7.489539748953975,
2513
- "grad_norm": 4.861904144287109,
2514
- "learning_rate": 3.5776869158878503e-06,
2515
- "loss": 5.312,
2516
- "step": 3580
2517
- },
2518
- {
2519
- "epoch": 7.510460251046025,
2520
- "grad_norm": 4.722445011138916,
2521
- "learning_rate": 3.4316588785046735e-06,
2522
- "loss": 5.3201,
2523
- "step": 3590
2524
- },
2525
- {
2526
- "epoch": 7.531380753138075,
2527
- "grad_norm": 4.904317378997803,
2528
- "learning_rate": 3.2856308411214953e-06,
2529
- "loss": 5.3577,
2530
- "step": 3600
2531
- },
2532
- {
2533
- "epoch": 7.552301255230126,
2534
- "grad_norm": 4.562917232513428,
2535
- "learning_rate": 3.139602803738318e-06,
2536
- "loss": 5.335,
2537
- "step": 3610
2538
- },
2539
- {
2540
- "epoch": 7.573221757322176,
2541
- "grad_norm": 4.725756645202637,
2542
- "learning_rate": 2.9935747663551403e-06,
2543
- "loss": 5.3373,
2544
- "step": 3620
2545
- },
2546
- {
2547
- "epoch": 7.594142259414226,
2548
- "grad_norm": 4.4985198974609375,
2549
- "learning_rate": 2.847546728971963e-06,
2550
- "loss": 5.3015,
2551
- "step": 3630
2552
- },
2553
- {
2554
- "epoch": 7.615062761506276,
2555
- "grad_norm": 4.389716148376465,
2556
- "learning_rate": 2.7015186915887853e-06,
2557
- "loss": 5.3138,
2558
- "step": 3640
2559
- },
2560
- {
2561
- "epoch": 7.635983263598327,
2562
- "grad_norm": 4.401087284088135,
2563
- "learning_rate": 2.5554906542056076e-06,
2564
- "loss": 5.3495,
2565
- "step": 3650
2566
- },
2567
- {
2568
- "epoch": 7.656903765690377,
2569
- "grad_norm": 4.640440464019775,
2570
- "learning_rate": 2.40946261682243e-06,
2571
- "loss": 5.3288,
2572
- "step": 3660
2573
- },
2574
- {
2575
- "epoch": 7.677824267782427,
2576
- "grad_norm": 4.51832389831543,
2577
- "learning_rate": 2.2634345794392526e-06,
2578
- "loss": 5.3584,
2579
- "step": 3670
2580
- },
2581
- {
2582
- "epoch": 7.698744769874477,
2583
- "grad_norm": 4.653135299682617,
2584
- "learning_rate": 2.117406542056075e-06,
2585
- "loss": 5.3177,
2586
- "step": 3680
2587
- },
2588
- {
2589
- "epoch": 7.7196652719665275,
2590
- "grad_norm": 4.688188076019287,
2591
- "learning_rate": 1.971378504672897e-06,
2592
- "loss": 5.6142,
2593
- "step": 3690
2594
- },
2595
- {
2596
- "epoch": 7.7405857740585775,
2597
- "grad_norm": 4.401451587677002,
2598
- "learning_rate": 1.8253504672897197e-06,
2599
- "loss": 5.3313,
2600
- "step": 3700
2601
- },
2602
- {
2603
- "epoch": 7.7615062761506275,
2604
- "grad_norm": 4.2343058586120605,
2605
- "learning_rate": 1.6793224299065422e-06,
2606
- "loss": 5.3242,
2607
- "step": 3710
2608
- },
2609
- {
2610
- "epoch": 7.7824267782426775,
2611
- "grad_norm": 4.382846832275391,
2612
- "learning_rate": 1.5332943925233645e-06,
2613
- "loss": 5.3136,
2614
- "step": 3720
2615
- },
2616
- {
2617
- "epoch": 7.803347280334728,
2618
- "grad_norm": 4.36667537689209,
2619
- "learning_rate": 1.387266355140187e-06,
2620
- "loss": 5.3365,
2621
- "step": 3730
2622
- },
2623
- {
2624
- "epoch": 7.824267782426778,
2625
- "grad_norm": 4.2489142417907715,
2626
- "learning_rate": 1.2412383177570093e-06,
2627
- "loss": 5.3519,
2628
- "step": 3740
2629
- },
2630
- {
2631
- "epoch": 7.845188284518828,
2632
- "grad_norm": 4.181556224822998,
2633
- "learning_rate": 1.0952102803738318e-06,
2634
- "loss": 5.3193,
2635
- "step": 3750
2636
- },
2637
- {
2638
- "epoch": 7.866108786610878,
2639
- "grad_norm": 4.455552101135254,
2640
- "learning_rate": 9.491822429906542e-07,
2641
- "loss": 5.3517,
2642
- "step": 3760
2643
- },
2644
- {
2645
- "epoch": 7.887029288702929,
2646
- "grad_norm": 4.552452564239502,
2647
- "learning_rate": 8.031542056074766e-07,
2648
- "loss": 5.3122,
2649
- "step": 3770
2650
- },
2651
- {
2652
- "epoch": 7.907949790794979,
2653
- "grad_norm": 4.30491304397583,
2654
- "learning_rate": 6.571261682242991e-07,
2655
- "loss": 5.3714,
2656
- "step": 3780
2657
- },
2658
- {
2659
- "epoch": 7.928870292887029,
2660
- "grad_norm": 4.551990985870361,
2661
- "learning_rate": 5.110981308411215e-07,
2662
- "loss": 5.3822,
2663
- "step": 3790
2664
- },
2665
- {
2666
- "epoch": 7.949790794979079,
2667
- "grad_norm": 4.361812591552734,
2668
- "learning_rate": 3.6507009345794396e-07,
2669
- "loss": 5.3352,
2670
- "step": 3800
2671
- },
2672
- {
2673
- "epoch": 7.97071129707113,
2674
- "grad_norm": 4.600877285003662,
2675
- "learning_rate": 2.1904205607476636e-07,
2676
- "loss": 5.34,
2677
- "step": 3810
2678
- },
2679
- {
2680
- "epoch": 7.99163179916318,
2681
- "grad_norm": 4.647894859313965,
2682
- "learning_rate": 7.301401869158879e-08,
2683
- "loss": 5.3474,
2684
- "step": 3820
2685
- }
2686
- ],
2687
- "logging_steps": 10,
2688
- "max_steps": 3824,
2689
- "num_input_tokens_seen": 0,
2690
- "num_train_epochs": 8,
2691
- "save_steps": 1000,
2692
- "stateful_callbacks": {
2693
- "TrainerControl": {
2694
- "args": {
2695
- "should_epoch_stop": false,
2696
- "should_evaluate": false,
2697
- "should_log": false,
2698
- "should_save": true,
2699
- "should_training_stop": true
2700
- },
2701
- "attributes": {}
2702
- }
2703
- },
2704
- "total_flos": 0.0,
2705
- "train_batch_size": 48,
2706
- "trial_name": null,
2707
- "trial_params": null
2708
- }
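
The trainer_state.json removed above is the bookkeeping file written by the Hugging Face transformers Trainer: its log_history records one entry every logging_steps=10 optimizer steps (epoch, grad_norm, learning_rate, loss, step) up to max_steps=3824, followed by the run-level settings (8 epochs, train batch size 48, a checkpoint every 1000 steps). A minimal sketch for inspecting such a file, assuming a local copy with this same schema — the file name and the 500-step sampling interval below are illustrative, not taken from the repo:

import json

# Read a Trainer state file and print the loss / learning-rate trajectory.
with open("trainer_state.json") as f:
    state = json.load(f)

for entry in state["log_history"]:
    # Each logged entry carries epoch, grad_norm, learning_rate, loss, step.
    if "loss" in entry and entry.get("step", 0) % 500 == 0:
        print(f"step {entry['step']:>5} | epoch {entry['epoch']:.2f} | "
              f"loss {entry['loss']:.4f} | lr {entry['learning_rate']:.2e}")
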
pretrain/one_image_layer4_pretrain_3824/training_args.bin DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:544b047c6f9c7fa1a066fc7b8a9fe0ff8c0311e845b8aec90599ca9f06319245
3
- size 5624
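
training_args.bin is tracked with Git LFS, so the diff above shows only the pointer (the sha256 oid and a 5,624-byte payload), not the object itself. Files with this name written by the transformers Trainer are conventionally a pickled TrainingArguments object; a hedged sketch for inspecting a real copy of the file (not the pointer), assuming a recent PyTorch where weights_only must be disabled to unpickle arbitrary Python objects:

import torch

# Load the pickled TrainingArguments saved alongside the checkpoint.
# weights_only=False is required because the payload is a Python object,
# not a tensor state dict; only do this for files you trust.
args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.per_device_train_batch_size, args.save_steps)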