Training in progress, epoch 0
Browse files- .gitattributes +1 -0
- added_tokens.json +24 -0
- config.json +29 -0
- merges.txt +0 -0
- model-00001-of-00004.safetensors +3 -0
- model-00002-of-00004.safetensors +3 -0
- model-00003-of-00004.safetensors +3 -0
- model-00004-of-00004.safetensors +3 -0
- model.safetensors.index.json +346 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +208 -0
- trainer_log.jsonl +192 -0
- training_args.bin +3 -0
- vocab.json +0 -0
.gitattributes
CHANGED
|
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
tokenizer.json filter=lfs diff=lfs merge=lfs -text
|
added_tokens.json
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"</tool_call>": 151658,
|
| 3 |
+
"<tool_call>": 151657,
|
| 4 |
+
"<|box_end|>": 151649,
|
| 5 |
+
"<|box_start|>": 151648,
|
| 6 |
+
"<|endoftext|>": 151643,
|
| 7 |
+
"<|file_sep|>": 151664,
|
| 8 |
+
"<|fim_middle|>": 151660,
|
| 9 |
+
"<|fim_pad|>": 151662,
|
| 10 |
+
"<|fim_prefix|>": 151659,
|
| 11 |
+
"<|fim_suffix|>": 151661,
|
| 12 |
+
"<|im_end|>": 151645,
|
| 13 |
+
"<|im_start|>": 151644,
|
| 14 |
+
"<|image_pad|>": 151655,
|
| 15 |
+
"<|object_ref_end|>": 151647,
|
| 16 |
+
"<|object_ref_start|>": 151646,
|
| 17 |
+
"<|quad_end|>": 151651,
|
| 18 |
+
"<|quad_start|>": 151650,
|
| 19 |
+
"<|repo_name|>": 151663,
|
| 20 |
+
"<|video_pad|>": 151656,
|
| 21 |
+
"<|vision_end|>": 151653,
|
| 22 |
+
"<|vision_pad|>": 151654,
|
| 23 |
+
"<|vision_start|>": 151652
|
| 24 |
+
}
|
config.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_name_or_path": "Qwen/Qwen2.5-7B-Instruct",
|
| 3 |
+
"architectures": [
|
| 4 |
+
"Qwen2ForCausalLM"
|
| 5 |
+
],
|
| 6 |
+
"attention_dropout": 0.0,
|
| 7 |
+
"bos_token_id": 151643,
|
| 8 |
+
"eos_token_id": 151645,
|
| 9 |
+
"hidden_act": "silu",
|
| 10 |
+
"hidden_size": 3584,
|
| 11 |
+
"initializer_range": 0.02,
|
| 12 |
+
"intermediate_size": 18944,
|
| 13 |
+
"max_position_embeddings": 32768,
|
| 14 |
+
"max_window_layers": 28,
|
| 15 |
+
"model_type": "qwen2",
|
| 16 |
+
"num_attention_heads": 28,
|
| 17 |
+
"num_hidden_layers": 28,
|
| 18 |
+
"num_key_value_heads": 4,
|
| 19 |
+
"rms_norm_eps": 1e-06,
|
| 20 |
+
"rope_scaling": null,
|
| 21 |
+
"rope_theta": 1000000.0,
|
| 22 |
+
"sliding_window": null,
|
| 23 |
+
"tie_word_embeddings": false,
|
| 24 |
+
"torch_dtype": "bfloat16",
|
| 25 |
+
"transformers_version": "4.46.1",
|
| 26 |
+
"use_cache": false,
|
| 27 |
+
"use_sliding_window": false,
|
| 28 |
+
"vocab_size": 152064
|
| 29 |
+
}
|
merges.txt
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
model-00001-of-00004.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:9ba2adcc4d2e9ae891c4f5a5a6fd64e5076083c888d29df2019b87d083a77e19
|
| 3 |
+
size 4877660776
|
model-00002-of-00004.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:afeacd71499147c4e960f5f7369a44f17b4aaa7f5e13bfd7115e842f092bd479
|
| 3 |
+
size 4932751008
|
model-00003-of-00004.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:6f0eb9dc9c454f9da1f815286b19c2a5fa99d0a3ed1ee979a3f7573682436229
|
| 3 |
+
size 4330865200
|
model-00004-of-00004.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:ce7da6238616151c019f3b017abb744b65756ead503b55928b62a333bdc3c4bc
|
| 3 |
+
size 1089994880
|
model.safetensors.index.json
ADDED
|
@@ -0,0 +1,346 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"metadata": {
|
| 3 |
+
"total_size": 15231233024
|
| 4 |
+
},
|
| 5 |
+
"weight_map": {
|
| 6 |
+
"lm_head.weight": "model-00004-of-00004.safetensors",
|
| 7 |
+
"model.embed_tokens.weight": "model-00001-of-00004.safetensors",
|
| 8 |
+
"model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 9 |
+
"model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 10 |
+
"model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 11 |
+
"model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 12 |
+
"model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 13 |
+
"model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 14 |
+
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 15 |
+
"model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 16 |
+
"model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 17 |
+
"model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 18 |
+
"model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 19 |
+
"model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 20 |
+
"model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 21 |
+
"model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 22 |
+
"model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 23 |
+
"model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 24 |
+
"model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 25 |
+
"model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 26 |
+
"model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 27 |
+
"model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 28 |
+
"model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 29 |
+
"model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 30 |
+
"model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 31 |
+
"model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 32 |
+
"model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 33 |
+
"model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 34 |
+
"model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 35 |
+
"model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 36 |
+
"model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 37 |
+
"model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 38 |
+
"model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 39 |
+
"model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 40 |
+
"model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 41 |
+
"model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 42 |
+
"model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 43 |
+
"model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 44 |
+
"model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 45 |
+
"model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 46 |
+
"model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 47 |
+
"model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 48 |
+
"model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 49 |
+
"model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 50 |
+
"model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 51 |
+
"model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 52 |
+
"model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 53 |
+
"model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 54 |
+
"model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 55 |
+
"model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 56 |
+
"model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 57 |
+
"model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 58 |
+
"model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 59 |
+
"model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 60 |
+
"model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 61 |
+
"model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 62 |
+
"model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 63 |
+
"model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 64 |
+
"model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 65 |
+
"model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 66 |
+
"model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 67 |
+
"model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 68 |
+
"model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 69 |
+
"model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 70 |
+
"model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 71 |
+
"model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 72 |
+
"model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 73 |
+
"model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 74 |
+
"model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 75 |
+
"model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 76 |
+
"model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 77 |
+
"model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 78 |
+
"model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 79 |
+
"model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 80 |
+
"model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 81 |
+
"model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 82 |
+
"model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 83 |
+
"model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 84 |
+
"model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 85 |
+
"model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 86 |
+
"model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 87 |
+
"model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 88 |
+
"model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 89 |
+
"model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 90 |
+
"model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 91 |
+
"model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 92 |
+
"model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 93 |
+
"model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 94 |
+
"model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 95 |
+
"model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 96 |
+
"model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 97 |
+
"model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 98 |
+
"model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 99 |
+
"model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 100 |
+
"model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 101 |
+
"model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 102 |
+
"model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 103 |
+
"model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 104 |
+
"model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 105 |
+
"model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 106 |
+
"model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 107 |
+
"model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 108 |
+
"model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 109 |
+
"model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 110 |
+
"model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 111 |
+
"model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 112 |
+
"model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 113 |
+
"model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 114 |
+
"model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 115 |
+
"model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 116 |
+
"model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 117 |
+
"model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 118 |
+
"model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 119 |
+
"model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 120 |
+
"model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 121 |
+
"model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 122 |
+
"model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 123 |
+
"model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 124 |
+
"model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 125 |
+
"model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 126 |
+
"model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 127 |
+
"model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 128 |
+
"model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 129 |
+
"model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 130 |
+
"model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 131 |
+
"model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 132 |
+
"model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 133 |
+
"model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 134 |
+
"model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 135 |
+
"model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 136 |
+
"model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 137 |
+
"model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 138 |
+
"model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 139 |
+
"model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 140 |
+
"model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 141 |
+
"model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 142 |
+
"model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 143 |
+
"model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 144 |
+
"model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 145 |
+
"model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 146 |
+
"model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 147 |
+
"model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 148 |
+
"model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 149 |
+
"model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 150 |
+
"model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 151 |
+
"model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 152 |
+
"model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 153 |
+
"model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 154 |
+
"model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 155 |
+
"model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 156 |
+
"model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 157 |
+
"model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 158 |
+
"model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 159 |
+
"model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 160 |
+
"model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 161 |
+
"model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 162 |
+
"model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 163 |
+
"model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 164 |
+
"model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 165 |
+
"model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 166 |
+
"model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 167 |
+
"model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 168 |
+
"model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 169 |
+
"model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 170 |
+
"model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 171 |
+
"model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 172 |
+
"model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 173 |
+
"model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 174 |
+
"model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 175 |
+
"model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 176 |
+
"model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 177 |
+
"model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 178 |
+
"model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 179 |
+
"model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 180 |
+
"model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 181 |
+
"model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 182 |
+
"model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 183 |
+
"model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 184 |
+
"model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 185 |
+
"model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 186 |
+
"model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 187 |
+
"model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 188 |
+
"model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 189 |
+
"model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 190 |
+
"model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 191 |
+
"model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 192 |
+
"model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 193 |
+
"model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 194 |
+
"model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 195 |
+
"model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 196 |
+
"model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 197 |
+
"model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 198 |
+
"model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 199 |
+
"model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 200 |
+
"model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 201 |
+
"model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 202 |
+
"model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 203 |
+
"model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 204 |
+
"model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 205 |
+
"model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 206 |
+
"model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 207 |
+
"model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 208 |
+
"model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 209 |
+
"model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 210 |
+
"model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 211 |
+
"model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 212 |
+
"model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 213 |
+
"model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 214 |
+
"model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 215 |
+
"model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 216 |
+
"model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 217 |
+
"model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 218 |
+
"model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 219 |
+
"model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 220 |
+
"model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 221 |
+
"model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 222 |
+
"model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 223 |
+
"model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 224 |
+
"model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 225 |
+
"model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 226 |
+
"model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 227 |
+
"model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 228 |
+
"model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 229 |
+
"model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 230 |
+
"model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 231 |
+
"model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 232 |
+
"model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 233 |
+
"model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 234 |
+
"model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 235 |
+
"model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 236 |
+
"model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 237 |
+
"model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 238 |
+
"model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 239 |
+
"model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 240 |
+
"model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 241 |
+
"model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 242 |
+
"model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 243 |
+
"model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 244 |
+
"model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 245 |
+
"model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 246 |
+
"model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 247 |
+
"model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 248 |
+
"model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 249 |
+
"model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
| 250 |
+
"model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
| 251 |
+
"model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
| 252 |
+
"model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
| 253 |
+
"model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
| 254 |
+
"model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
| 255 |
+
"model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
| 256 |
+
"model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
| 257 |
+
"model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
| 258 |
+
"model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
| 259 |
+
"model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
| 260 |
+
"model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 261 |
+
"model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 262 |
+
"model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 263 |
+
"model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 264 |
+
"model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 265 |
+
"model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 266 |
+
"model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 267 |
+
"model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 268 |
+
"model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 269 |
+
"model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 270 |
+
"model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 271 |
+
"model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 272 |
+
"model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 273 |
+
"model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 274 |
+
"model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 275 |
+
"model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 276 |
+
"model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 277 |
+
"model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 278 |
+
"model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 279 |
+
"model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 280 |
+
"model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 281 |
+
"model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 282 |
+
"model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 283 |
+
"model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 284 |
+
"model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 285 |
+
"model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 286 |
+
"model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 287 |
+
"model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 288 |
+
"model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 289 |
+
"model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 290 |
+
"model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 291 |
+
"model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 292 |
+
"model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 293 |
+
"model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 294 |
+
"model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 295 |
+
"model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 296 |
+
"model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 297 |
+
"model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 298 |
+
"model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 299 |
+
"model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 300 |
+
"model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 301 |
+
"model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 302 |
+
"model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 303 |
+
"model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 304 |
+
"model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 305 |
+
"model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 306 |
+
"model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 307 |
+
"model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 308 |
+
"model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 309 |
+
"model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
| 310 |
+
"model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
| 311 |
+
"model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
| 312 |
+
"model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
| 313 |
+
"model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 314 |
+
"model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 315 |
+
"model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 316 |
+
"model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 317 |
+
"model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 318 |
+
"model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 319 |
+
"model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 320 |
+
"model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 321 |
+
"model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 322 |
+
"model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 323 |
+
"model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 324 |
+
"model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 325 |
+
"model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
| 326 |
+
"model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
| 327 |
+
"model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
| 328 |
+
"model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
| 329 |
+
"model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
| 330 |
+
"model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
| 331 |
+
"model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
| 332 |
+
"model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 333 |
+
"model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
| 334 |
+
"model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
| 335 |
+
"model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
| 336 |
+
"model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
| 337 |
+
"model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
| 338 |
+
"model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
| 339 |
+
"model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
| 340 |
+
"model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
| 341 |
+
"model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
| 342 |
+
"model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
| 343 |
+
"model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
| 344 |
+
"model.norm.weight": "model-00003-of-00004.safetensors"
|
| 345 |
+
}
|
| 346 |
+
}
|
special_tokens_map.json
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"additional_special_tokens": [
|
| 3 |
+
"<|im_start|>",
|
| 4 |
+
"<|im_end|>",
|
| 5 |
+
"<|object_ref_start|>",
|
| 6 |
+
"<|object_ref_end|>",
|
| 7 |
+
"<|box_start|>",
|
| 8 |
+
"<|box_end|>",
|
| 9 |
+
"<|quad_start|>",
|
| 10 |
+
"<|quad_end|>",
|
| 11 |
+
"<|vision_start|>",
|
| 12 |
+
"<|vision_end|>",
|
| 13 |
+
"<|vision_pad|>",
|
| 14 |
+
"<|image_pad|>",
|
| 15 |
+
"<|video_pad|>"
|
| 16 |
+
],
|
| 17 |
+
"eos_token": {
|
| 18 |
+
"content": "<|endoftext|>",
|
| 19 |
+
"lstrip": false,
|
| 20 |
+
"normalized": false,
|
| 21 |
+
"rstrip": false,
|
| 22 |
+
"single_word": false
|
| 23 |
+
},
|
| 24 |
+
"pad_token": {
|
| 25 |
+
"content": "<|endoftext|>",
|
| 26 |
+
"lstrip": false,
|
| 27 |
+
"normalized": false,
|
| 28 |
+
"rstrip": false,
|
| 29 |
+
"single_word": false
|
| 30 |
+
}
|
| 31 |
+
}
|
tokenizer.json
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
|
| 3 |
+
size 11421896
|
tokenizer_config.json
ADDED
|
@@ -0,0 +1,208 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"add_bos_token": false,
|
| 3 |
+
"add_prefix_space": false,
|
| 4 |
+
"added_tokens_decoder": {
|
| 5 |
+
"151643": {
|
| 6 |
+
"content": "<|endoftext|>",
|
| 7 |
+
"lstrip": false,
|
| 8 |
+
"normalized": false,
|
| 9 |
+
"rstrip": false,
|
| 10 |
+
"single_word": false,
|
| 11 |
+
"special": true
|
| 12 |
+
},
|
| 13 |
+
"151644": {
|
| 14 |
+
"content": "<|im_start|>",
|
| 15 |
+
"lstrip": false,
|
| 16 |
+
"normalized": false,
|
| 17 |
+
"rstrip": false,
|
| 18 |
+
"single_word": false,
|
| 19 |
+
"special": true
|
| 20 |
+
},
|
| 21 |
+
"151645": {
|
| 22 |
+
"content": "<|im_end|>",
|
| 23 |
+
"lstrip": false,
|
| 24 |
+
"normalized": false,
|
| 25 |
+
"rstrip": false,
|
| 26 |
+
"single_word": false,
|
| 27 |
+
"special": true
|
| 28 |
+
},
|
| 29 |
+
"151646": {
|
| 30 |
+
"content": "<|object_ref_start|>",
|
| 31 |
+
"lstrip": false,
|
| 32 |
+
"normalized": false,
|
| 33 |
+
"rstrip": false,
|
| 34 |
+
"single_word": false,
|
| 35 |
+
"special": true
|
| 36 |
+
},
|
| 37 |
+
"151647": {
|
| 38 |
+
"content": "<|object_ref_end|>",
|
| 39 |
+
"lstrip": false,
|
| 40 |
+
"normalized": false,
|
| 41 |
+
"rstrip": false,
|
| 42 |
+
"single_word": false,
|
| 43 |
+
"special": true
|
| 44 |
+
},
|
| 45 |
+
"151648": {
|
| 46 |
+
"content": "<|box_start|>",
|
| 47 |
+
"lstrip": false,
|
| 48 |
+
"normalized": false,
|
| 49 |
+
"rstrip": false,
|
| 50 |
+
"single_word": false,
|
| 51 |
+
"special": true
|
| 52 |
+
},
|
| 53 |
+
"151649": {
|
| 54 |
+
"content": "<|box_end|>",
|
| 55 |
+
"lstrip": false,
|
| 56 |
+
"normalized": false,
|
| 57 |
+
"rstrip": false,
|
| 58 |
+
"single_word": false,
|
| 59 |
+
"special": true
|
| 60 |
+
},
|
| 61 |
+
"151650": {
|
| 62 |
+
"content": "<|quad_start|>",
|
| 63 |
+
"lstrip": false,
|
| 64 |
+
"normalized": false,
|
| 65 |
+
"rstrip": false,
|
| 66 |
+
"single_word": false,
|
| 67 |
+
"special": true
|
| 68 |
+
},
|
| 69 |
+
"151651": {
|
| 70 |
+
"content": "<|quad_end|>",
|
| 71 |
+
"lstrip": false,
|
| 72 |
+
"normalized": false,
|
| 73 |
+
"rstrip": false,
|
| 74 |
+
"single_word": false,
|
| 75 |
+
"special": true
|
| 76 |
+
},
|
| 77 |
+
"151652": {
|
| 78 |
+
"content": "<|vision_start|>",
|
| 79 |
+
"lstrip": false,
|
| 80 |
+
"normalized": false,
|
| 81 |
+
"rstrip": false,
|
| 82 |
+
"single_word": false,
|
| 83 |
+
"special": true
|
| 84 |
+
},
|
| 85 |
+
"151653": {
|
| 86 |
+
"content": "<|vision_end|>",
|
| 87 |
+
"lstrip": false,
|
| 88 |
+
"normalized": false,
|
| 89 |
+
"rstrip": false,
|
| 90 |
+
"single_word": false,
|
| 91 |
+
"special": true
|
| 92 |
+
},
|
| 93 |
+
"151654": {
|
| 94 |
+
"content": "<|vision_pad|>",
|
| 95 |
+
"lstrip": false,
|
| 96 |
+
"normalized": false,
|
| 97 |
+
"rstrip": false,
|
| 98 |
+
"single_word": false,
|
| 99 |
+
"special": true
|
| 100 |
+
},
|
| 101 |
+
"151655": {
|
| 102 |
+
"content": "<|image_pad|>",
|
| 103 |
+
"lstrip": false,
|
| 104 |
+
"normalized": false,
|
| 105 |
+
"rstrip": false,
|
| 106 |
+
"single_word": false,
|
| 107 |
+
"special": true
|
| 108 |
+
},
|
| 109 |
+
"151656": {
|
| 110 |
+
"content": "<|video_pad|>",
|
| 111 |
+
"lstrip": false,
|
| 112 |
+
"normalized": false,
|
| 113 |
+
"rstrip": false,
|
| 114 |
+
"single_word": false,
|
| 115 |
+
"special": true
|
| 116 |
+
},
|
| 117 |
+
"151657": {
|
| 118 |
+
"content": "<tool_call>",
|
| 119 |
+
"lstrip": false,
|
| 120 |
+
"normalized": false,
|
| 121 |
+
"rstrip": false,
|
| 122 |
+
"single_word": false,
|
| 123 |
+
"special": false
|
| 124 |
+
},
|
| 125 |
+
"151658": {
|
| 126 |
+
"content": "</tool_call>",
|
| 127 |
+
"lstrip": false,
|
| 128 |
+
"normalized": false,
|
| 129 |
+
"rstrip": false,
|
| 130 |
+
"single_word": false,
|
| 131 |
+
"special": false
|
| 132 |
+
},
|
| 133 |
+
"151659": {
|
| 134 |
+
"content": "<|fim_prefix|>",
|
| 135 |
+
"lstrip": false,
|
| 136 |
+
"normalized": false,
|
| 137 |
+
"rstrip": false,
|
| 138 |
+
"single_word": false,
|
| 139 |
+
"special": false
|
| 140 |
+
},
|
| 141 |
+
"151660": {
|
| 142 |
+
"content": "<|fim_middle|>",
|
| 143 |
+
"lstrip": false,
|
| 144 |
+
"normalized": false,
|
| 145 |
+
"rstrip": false,
|
| 146 |
+
"single_word": false,
|
| 147 |
+
"special": false
|
| 148 |
+
},
|
| 149 |
+
"151661": {
|
| 150 |
+
"content": "<|fim_suffix|>",
|
| 151 |
+
"lstrip": false,
|
| 152 |
+
"normalized": false,
|
| 153 |
+
"rstrip": false,
|
| 154 |
+
"single_word": false,
|
| 155 |
+
"special": false
|
| 156 |
+
},
|
| 157 |
+
"151662": {
|
| 158 |
+
"content": "<|fim_pad|>",
|
| 159 |
+
"lstrip": false,
|
| 160 |
+
"normalized": false,
|
| 161 |
+
"rstrip": false,
|
| 162 |
+
"single_word": false,
|
| 163 |
+
"special": false
|
| 164 |
+
},
|
| 165 |
+
"151663": {
|
| 166 |
+
"content": "<|repo_name|>",
|
| 167 |
+
"lstrip": false,
|
| 168 |
+
"normalized": false,
|
| 169 |
+
"rstrip": false,
|
| 170 |
+
"single_word": false,
|
| 171 |
+
"special": false
|
| 172 |
+
},
|
| 173 |
+
"151664": {
|
| 174 |
+
"content": "<|file_sep|>",
|
| 175 |
+
"lstrip": false,
|
| 176 |
+
"normalized": false,
|
| 177 |
+
"rstrip": false,
|
| 178 |
+
"single_word": false,
|
| 179 |
+
"special": false
|
| 180 |
+
}
|
| 181 |
+
},
|
| 182 |
+
"additional_special_tokens": [
|
| 183 |
+
"<|im_start|>",
|
| 184 |
+
"<|im_end|>",
|
| 185 |
+
"<|object_ref_start|>",
|
| 186 |
+
"<|object_ref_end|>",
|
| 187 |
+
"<|box_start|>",
|
| 188 |
+
"<|box_end|>",
|
| 189 |
+
"<|quad_start|>",
|
| 190 |
+
"<|quad_end|>",
|
| 191 |
+
"<|vision_start|>",
|
| 192 |
+
"<|vision_end|>",
|
| 193 |
+
"<|vision_pad|>",
|
| 194 |
+
"<|image_pad|>",
|
| 195 |
+
"<|video_pad|>"
|
| 196 |
+
],
|
| 197 |
+
"bos_token": null,
|
| 198 |
+
"chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
|
| 199 |
+
"clean_up_tokenization_spaces": false,
|
| 200 |
+
"eos_token": "<|endoftext|>",
|
| 201 |
+
"errors": "replace",
|
| 202 |
+
"model_max_length": 131072,
|
| 203 |
+
"pad_token": "<|endoftext|>",
|
| 204 |
+
"padding_side": "right",
|
| 205 |
+
"split_special_tokens": false,
|
| 206 |
+
"tokenizer_class": "Qwen2Tokenizer",
|
| 207 |
+
"unk_token": null
|
| 208 |
+
}
|
trainer_log.jsonl
ADDED
|
@@ -0,0 +1,192 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{"current_steps": 10, "total_steps": 5424, "loss": 0.8665, "lr": 1.8416206261510132e-07, "epoch": 0.005529953917050691, "percentage": 0.18, "elapsed_time": "0:02:58", "remaining_time": "1 day, 2:49:33"}
|
| 2 |
+
{"current_steps": 20, "total_steps": 5424, "loss": 0.8531, "lr": 3.6832412523020263e-07, "epoch": 0.011059907834101382, "percentage": 0.37, "elapsed_time": "0:05:50", "remaining_time": "1 day, 2:18:19"}
|
| 3 |
+
{"current_steps": 30, "total_steps": 5424, "loss": 0.8281, "lr": 5.524861878453039e-07, "epoch": 0.016589861751152075, "percentage": 0.55, "elapsed_time": "0:08:44", "remaining_time": "1 day, 2:10:31"}
|
| 4 |
+
{"current_steps": 40, "total_steps": 5424, "loss": 0.7589, "lr": 7.366482504604053e-07, "epoch": 0.022119815668202765, "percentage": 0.74, "elapsed_time": "0:11:36", "remaining_time": "1 day, 2:02:31"}
|
| 5 |
+
{"current_steps": 50, "total_steps": 5424, "loss": 0.715, "lr": 9.208103130755065e-07, "epoch": 0.027649769585253458, "percentage": 0.92, "elapsed_time": "0:14:25", "remaining_time": "1 day, 1:50:58"}
|
| 6 |
+
{"current_steps": 60, "total_steps": 5424, "loss": 0.6922, "lr": 1.1049723756906078e-06, "epoch": 0.03317972350230415, "percentage": 1.11, "elapsed_time": "0:17:07", "remaining_time": "1 day, 1:30:40"}
|
| 7 |
+
{"current_steps": 70, "total_steps": 5424, "loss": 0.6497, "lr": 1.289134438305709e-06, "epoch": 0.03870967741935484, "percentage": 1.29, "elapsed_time": "0:19:59", "remaining_time": "1 day, 1:29:08"}
|
| 8 |
+
{"current_steps": 80, "total_steps": 5424, "loss": 0.6281, "lr": 1.4732965009208105e-06, "epoch": 0.04423963133640553, "percentage": 1.47, "elapsed_time": "0:22:48", "remaining_time": "1 day, 1:23:39"}
|
| 9 |
+
{"current_steps": 90, "total_steps": 5424, "loss": 0.611, "lr": 1.6574585635359118e-06, "epoch": 0.04976958525345622, "percentage": 1.66, "elapsed_time": "0:25:32", "remaining_time": "1 day, 1:14:14"}
|
| 10 |
+
{"current_steps": 100, "total_steps": 5424, "loss": 0.594, "lr": 1.841620626151013e-06, "epoch": 0.055299539170506916, "percentage": 1.84, "elapsed_time": "0:28:14", "remaining_time": "1 day, 1:03:51"}
|
| 11 |
+
{"current_steps": 110, "total_steps": 5424, "loss": 0.5946, "lr": 2.0257826887661147e-06, "epoch": 0.060829493087557605, "percentage": 2.03, "elapsed_time": "0:31:05", "remaining_time": "1 day, 1:02:10"}
|
| 12 |
+
{"current_steps": 120, "total_steps": 5424, "loss": 0.5758, "lr": 2.2099447513812157e-06, "epoch": 0.0663594470046083, "percentage": 2.21, "elapsed_time": "0:34:00", "remaining_time": "1 day, 1:03:16"}
|
| 13 |
+
{"current_steps": 130, "total_steps": 5424, "loss": 0.5774, "lr": 2.394106813996317e-06, "epoch": 0.07188940092165899, "percentage": 2.4, "elapsed_time": "0:36:49", "remaining_time": "1 day, 0:59:24"}
|
| 14 |
+
{"current_steps": 140, "total_steps": 5424, "loss": 0.5811, "lr": 2.578268876611418e-06, "epoch": 0.07741935483870968, "percentage": 2.58, "elapsed_time": "0:39:39", "remaining_time": "1 day, 0:56:40"}
|
| 15 |
+
{"current_steps": 150, "total_steps": 5424, "loss": 0.5567, "lr": 2.7624309392265196e-06, "epoch": 0.08294930875576037, "percentage": 2.77, "elapsed_time": "0:42:25", "remaining_time": "1 day, 0:51:29"}
|
| 16 |
+
{"current_steps": 160, "total_steps": 5424, "loss": 0.5558, "lr": 2.946593001841621e-06, "epoch": 0.08847926267281106, "percentage": 2.95, "elapsed_time": "0:45:15", "remaining_time": "1 day, 0:48:58"}
|
| 17 |
+
{"current_steps": 170, "total_steps": 5424, "loss": 0.5493, "lr": 3.130755064456722e-06, "epoch": 0.09400921658986175, "percentage": 3.13, "elapsed_time": "0:47:59", "remaining_time": "1 day, 0:42:59"}
|
| 18 |
+
{"current_steps": 180, "total_steps": 5424, "loss": 0.5517, "lr": 3.3149171270718235e-06, "epoch": 0.09953917050691244, "percentage": 3.32, "elapsed_time": "0:50:50", "remaining_time": "1 day, 0:41:21"}
|
| 19 |
+
{"current_steps": 190, "total_steps": 5424, "loss": 0.5463, "lr": 3.4990791896869245e-06, "epoch": 0.10506912442396313, "percentage": 3.5, "elapsed_time": "0:53:39", "remaining_time": "1 day, 0:37:55"}
|
| 20 |
+
{"current_steps": 200, "total_steps": 5424, "loss": 0.547, "lr": 3.683241252302026e-06, "epoch": 0.11059907834101383, "percentage": 3.69, "elapsed_time": "0:56:23", "remaining_time": "1 day, 0:32:52"}
|
| 21 |
+
{"current_steps": 210, "total_steps": 5424, "loss": 0.5393, "lr": 3.867403314917128e-06, "epoch": 0.11612903225806452, "percentage": 3.87, "elapsed_time": "0:59:12", "remaining_time": "1 day, 0:30:03"}
|
| 22 |
+{"current_steps": 220, "total_steps": 5424, "loss": 0.5329, "lr": 4.051565377532229e-06, "epoch": 0.12165898617511521, "percentage": 4.06, "elapsed_time": "1:01:58", "remaining_time": "1 day, 0:26:00"}
+{"current_steps": 230, "total_steps": 5424, "loss": 0.5431, "lr": 4.23572744014733e-06, "epoch": 0.1271889400921659, "percentage": 4.24, "elapsed_time": "1:04:50", "remaining_time": "1 day, 0:24:21"}
+{"current_steps": 240, "total_steps": 5424, "loss": 0.5381, "lr": 4.419889502762431e-06, "epoch": 0.1327188940092166, "percentage": 4.42, "elapsed_time": "1:07:35", "remaining_time": "1 day, 0:19:55"}
+{"current_steps": 250, "total_steps": 5424, "loss": 0.5339, "lr": 4.604051565377533e-06, "epoch": 0.1382488479262673, "percentage": 4.61, "elapsed_time": "1:10:14", "remaining_time": "1 day, 0:13:41"}
+{"current_steps": 260, "total_steps": 5424, "loss": 0.5337, "lr": 4.788213627992634e-06, "epoch": 0.14377880184331798, "percentage": 4.79, "elapsed_time": "1:13:00", "remaining_time": "1 day, 0:10:12"}
+{"current_steps": 270, "total_steps": 5424, "loss": 0.5325, "lr": 4.972375690607736e-06, "epoch": 0.14930875576036867, "percentage": 4.98, "elapsed_time": "1:15:44", "remaining_time": "1 day, 0:05:53"}
+{"current_steps": 280, "total_steps": 5424, "loss": 0.5347, "lr": 5.156537753222836e-06, "epoch": 0.15483870967741936, "percentage": 5.16, "elapsed_time": "1:18:34", "remaining_time": "1 day, 0:03:39"}
+{"current_steps": 290, "total_steps": 5424, "loss": 0.5264, "lr": 5.340699815837938e-06, "epoch": 0.16036866359447005, "percentage": 5.35, "elapsed_time": "1:21:23", "remaining_time": "1 day, 0:00:52"}
+{"current_steps": 300, "total_steps": 5424, "loss": 0.5295, "lr": 5.524861878453039e-06, "epoch": 0.16589861751152074, "percentage": 5.53, "elapsed_time": "1:24:19", "remaining_time": "1 day, 0:00:15"}
+{"current_steps": 310, "total_steps": 5424, "loss": 0.5267, "lr": 5.709023941068141e-06, "epoch": 0.17142857142857143, "percentage": 5.72, "elapsed_time": "1:27:09", "remaining_time": "23:57:50"}
+{"current_steps": 320, "total_steps": 5424, "loss": 0.5208, "lr": 5.893186003683242e-06, "epoch": 0.17695852534562212, "percentage": 5.9, "elapsed_time": "1:29:58", "remaining_time": "23:55:12"}
+{"current_steps": 330, "total_steps": 5424, "loss": 0.5188, "lr": 6.077348066298343e-06, "epoch": 0.1824884792626728, "percentage": 6.08, "elapsed_time": "1:32:47", "remaining_time": "23:52:19"}
+{"current_steps": 340, "total_steps": 5424, "loss": 0.5154, "lr": 6.261510128913444e-06, "epoch": 0.1880184331797235, "percentage": 6.27, "elapsed_time": "1:35:30", "remaining_time": "23:48:05"}
+{"current_steps": 350, "total_steps": 5424, "loss": 0.5048, "lr": 6.445672191528546e-06, "epoch": 0.1935483870967742, "percentage": 6.45, "elapsed_time": "1:38:14", "remaining_time": "23:44:19"}
+{"current_steps": 360, "total_steps": 5424, "loss": 0.5128, "lr": 6.629834254143647e-06, "epoch": 0.19907834101382488, "percentage": 6.64, "elapsed_time": "1:41:05", "remaining_time": "23:42:08"}
+{"current_steps": 370, "total_steps": 5424, "loss": 0.5178, "lr": 6.8139963167587485e-06, "epoch": 0.20460829493087557, "percentage": 6.82, "elapsed_time": "1:43:57", "remaining_time": "23:39:58"}
+{"current_steps": 380, "total_steps": 5424, "loss": 0.514, "lr": 6.998158379373849e-06, "epoch": 0.21013824884792626, "percentage": 7.01, "elapsed_time": "1:46:43", "remaining_time": "23:36:34"}
+{"current_steps": 390, "total_steps": 5424, "loss": 0.5085, "lr": 7.182320441988951e-06, "epoch": 0.21566820276497695, "percentage": 7.19, "elapsed_time": "1:49:32", "remaining_time": "23:33:49"}
+{"current_steps": 400, "total_steps": 5424, "loss": 0.5139, "lr": 7.366482504604052e-06, "epoch": 0.22119815668202766, "percentage": 7.37, "elapsed_time": "1:52:20", "remaining_time": "23:31:01"}
+{"current_steps": 410, "total_steps": 5424, "loss": 0.5103, "lr": 7.5506445672191534e-06, "epoch": 0.22672811059907835, "percentage": 7.56, "elapsed_time": "1:55:05", "remaining_time": "23:27:34"}
+{"current_steps": 420, "total_steps": 5424, "loss": 0.506, "lr": 7.734806629834256e-06, "epoch": 0.23225806451612904, "percentage": 7.74, "elapsed_time": "1:57:54", "remaining_time": "23:24:44"}
+{"current_steps": 430, "total_steps": 5424, "loss": 0.508, "lr": 7.918968692449355e-06, "epoch": 0.23778801843317973, "percentage": 7.93, "elapsed_time": "2:00:34", "remaining_time": "23:20:16"}
+{"current_steps": 440, "total_steps": 5424, "loss": 0.5053, "lr": 8.103130755064459e-06, "epoch": 0.24331797235023042, "percentage": 8.11, "elapsed_time": "2:03:11", "remaining_time": "23:15:25"}
+{"current_steps": 450, "total_steps": 5424, "loss": 0.505, "lr": 8.287292817679558e-06, "epoch": 0.2488479262672811, "percentage": 8.3, "elapsed_time": "2:06:06", "remaining_time": "23:13:52"}
+{"current_steps": 460, "total_steps": 5424, "loss": 0.4994, "lr": 8.47145488029466e-06, "epoch": 0.2543778801843318, "percentage": 8.48, "elapsed_time": "2:08:52", "remaining_time": "23:10:39"}
+{"current_steps": 470, "total_steps": 5424, "loss": 0.5003, "lr": 8.655616942909761e-06, "epoch": 0.25990783410138246, "percentage": 8.67, "elapsed_time": "2:11:41", "remaining_time": "23:08:04"}
+{"current_steps": 480, "total_steps": 5424, "loss": 0.4971, "lr": 8.839779005524863e-06, "epoch": 0.2654377880184332, "percentage": 8.85, "elapsed_time": "2:14:28", "remaining_time": "23:05:08"}
+{"current_steps": 490, "total_steps": 5424, "loss": 0.5032, "lr": 9.023941068139964e-06, "epoch": 0.2709677419354839, "percentage": 9.03, "elapsed_time": "2:17:17", "remaining_time": "23:02:29"}
+{"current_steps": 500, "total_steps": 5424, "loss": 0.4999, "lr": 9.208103130755066e-06, "epoch": 0.2764976958525346, "percentage": 9.22, "elapsed_time": "2:19:57", "remaining_time": "22:58:16"}
+{"current_steps": 510, "total_steps": 5424, "loss": 0.5035, "lr": 9.392265193370167e-06, "epoch": 0.2820276497695853, "percentage": 9.4, "elapsed_time": "2:22:48", "remaining_time": "22:56:00"}
+{"current_steps": 520, "total_steps": 5424, "loss": 0.4999, "lr": 9.576427255985269e-06, "epoch": 0.28755760368663597, "percentage": 9.59, "elapsed_time": "2:25:36", "remaining_time": "22:53:08"}
+{"current_steps": 530, "total_steps": 5424, "loss": 0.5089, "lr": 9.760589318600368e-06, "epoch": 0.29308755760368665, "percentage": 9.77, "elapsed_time": "2:28:32", "remaining_time": "22:51:35"}
+{"current_steps": 540, "total_steps": 5424, "loss": 0.499, "lr": 9.944751381215471e-06, "epoch": 0.29861751152073734, "percentage": 9.96, "elapsed_time": "2:31:20", "remaining_time": "22:48:52"}
+{"current_steps": 550, "total_steps": 5424, "loss": 0.5006, "lr": 9.999949252169092e-06, "epoch": 0.30414746543778803, "percentage": 10.14, "elapsed_time": "2:34:07", "remaining_time": "22:45:52"}
+{"current_steps": 560, "total_steps": 5424, "loss": 0.4882, "lr": 9.999700693844523e-06, "epoch": 0.3096774193548387, "percentage": 10.32, "elapsed_time": "2:36:52", "remaining_time": "22:42:34"}
+{"current_steps": 570, "total_steps": 5424, "loss": 0.4916, "lr": 9.99924501428021e-06, "epoch": 0.3152073732718894, "percentage": 10.51, "elapsed_time": "2:39:40", "remaining_time": "22:39:47"}
+{"current_steps": 580, "total_steps": 5424, "loss": 0.4924, "lr": 9.99858223235347e-06, "epoch": 0.3207373271889401, "percentage": 10.69, "elapsed_time": "2:42:27", "remaining_time": "22:36:46"}
+{"current_steps": 590, "total_steps": 5424, "loss": 0.4895, "lr": 9.997712375521187e-06, "epoch": 0.3262672811059908, "percentage": 10.88, "elapsed_time": "2:45:13", "remaining_time": "22:33:47"}
+{"current_steps": 600, "total_steps": 5424, "loss": 0.4933, "lr": 9.996635479818683e-06, "epoch": 0.3317972350230415, "percentage": 11.06, "elapsed_time": "2:48:03", "remaining_time": "22:31:12"}
+{"current_steps": 610, "total_steps": 5424, "loss": 0.4954, "lr": 9.995351589858227e-06, "epoch": 0.33732718894009217, "percentage": 11.25, "elapsed_time": "2:50:51", "remaining_time": "22:28:19"}
+{"current_steps": 620, "total_steps": 5424, "loss": 0.4918, "lr": 9.993860758827171e-06, "epoch": 0.34285714285714286, "percentage": 11.43, "elapsed_time": "2:53:38", "remaining_time": "22:25:29"}
+{"current_steps": 630, "total_steps": 5424, "loss": 0.4928, "lr": 9.992163048485776e-06, "epoch": 0.34838709677419355, "percentage": 11.62, "elapsed_time": "2:56:29", "remaining_time": "22:22:57"}
+{"current_steps": 640, "total_steps": 5424, "loss": 0.4838, "lr": 9.990258529164618e-06, "epoch": 0.35391705069124424, "percentage": 11.8, "elapsed_time": "2:59:16", "remaining_time": "22:20:06"}
+{"current_steps": 650, "total_steps": 5424, "loss": 0.488, "lr": 9.988147279761706e-06, "epoch": 0.35944700460829493, "percentage": 11.98, "elapsed_time": "3:02:10", "remaining_time": "22:18:03"}
+{"current_steps": 660, "total_steps": 5424, "loss": 0.4985, "lr": 9.985829387739192e-06, "epoch": 0.3649769585253456, "percentage": 12.17, "elapsed_time": "3:05:03", "remaining_time": "22:15:48"}
+{"current_steps": 670, "total_steps": 5424, "loss": 0.4915, "lr": 9.98330494911976e-06, "epoch": 0.3705069124423963, "percentage": 12.35, "elapsed_time": "3:07:55", "remaining_time": "22:13:22"}
+{"current_steps": 680, "total_steps": 5424, "loss": 0.4878, "lr": 9.980574068482635e-06, "epoch": 0.376036866359447, "percentage": 12.54, "elapsed_time": "3:10:39", "remaining_time": "22:10:10"}
+{"current_steps": 690, "total_steps": 5424, "loss": 0.4957, "lr": 9.977636858959274e-06, "epoch": 0.3815668202764977, "percentage": 12.72, "elapsed_time": "3:13:18", "remaining_time": "22:06:15"}
+{"current_steps": 700, "total_steps": 5424, "loss": 0.4871, "lr": 9.974493442228653e-06, "epoch": 0.3870967741935484, "percentage": 12.91, "elapsed_time": "3:16:08", "remaining_time": "22:03:42"}
+{"current_steps": 710, "total_steps": 5424, "loss": 0.4839, "lr": 9.971143948512239e-06, "epoch": 0.39262672811059907, "percentage": 13.09, "elapsed_time": "3:18:51", "remaining_time": "22:00:19"}
+{"current_steps": 720, "total_steps": 5424, "loss": 0.4817, "lr": 9.967588516568601e-06, "epoch": 0.39815668202764976, "percentage": 13.27, "elapsed_time": "3:21:39", "remaining_time": "21:57:30"}
+{"current_steps": 730, "total_steps": 5424, "loss": 0.4926, "lr": 9.963827293687648e-06, "epoch": 0.40368663594470044, "percentage": 13.46, "elapsed_time": "3:24:24", "remaining_time": "21:54:22"}
+{"current_steps": 740, "total_steps": 5424, "loss": 0.4855, "lr": 9.959860435684534e-06, "epoch": 0.40921658986175113, "percentage": 13.64, "elapsed_time": "3:27:17", "remaining_time": "21:52:04"}
+{"current_steps": 750, "total_steps": 5424, "loss": 0.4909, "lr": 9.95568810689321e-06, "epoch": 0.4147465437788018, "percentage": 13.83, "elapsed_time": "3:30:02", "remaining_time": "21:49:00"}
+{"current_steps": 760, "total_steps": 5424, "loss": 0.4808, "lr": 9.951310480159605e-06, "epoch": 0.4202764976958525, "percentage": 14.01, "elapsed_time": "3:32:54", "remaining_time": "21:46:33"}
+{"current_steps": 770, "total_steps": 5424, "loss": 0.4816, "lr": 9.946727736834467e-06, "epoch": 0.4258064516129032, "percentage": 14.2, "elapsed_time": "3:35:43", "remaining_time": "21:43:50"}
+{"current_steps": 780, "total_steps": 5424, "loss": 0.4888, "lr": 9.94194006676586e-06, "epoch": 0.4313364055299539, "percentage": 14.38, "elapsed_time": "3:38:26", "remaining_time": "21:40:36"}
+{"current_steps": 790, "total_steps": 5424, "loss": 0.4877, "lr": 9.936947668291284e-06, "epoch": 0.4368663594470046, "percentage": 14.56, "elapsed_time": "3:41:13", "remaining_time": "21:37:40"}
+{"current_steps": 800, "total_steps": 5424, "loss": 0.4804, "lr": 9.931750748229475e-06, "epoch": 0.4423963133640553, "percentage": 14.75, "elapsed_time": "3:44:01", "remaining_time": "21:34:52"}
+{"current_steps": 810, "total_steps": 5424, "loss": 0.4795, "lr": 9.926349521871824e-06, "epoch": 0.447926267281106, "percentage": 14.93, "elapsed_time": "3:46:43", "remaining_time": "21:31:27"}
+{"current_steps": 820, "total_steps": 5424, "loss": 0.4756, "lr": 9.920744212973468e-06, "epoch": 0.4534562211981567, "percentage": 15.12, "elapsed_time": "3:49:28", "remaining_time": "21:28:22"}
+{"current_steps": 830, "total_steps": 5424, "loss": 0.4852, "lr": 9.91493505374401e-06, "epoch": 0.4589861751152074, "percentage": 15.3, "elapsed_time": "3:52:17", "remaining_time": "21:25:45"}
+{"current_steps": 840, "total_steps": 5424, "loss": 0.4816, "lr": 9.908922284837911e-06, "epoch": 0.4645161290322581, "percentage": 15.49, "elapsed_time": "3:55:07", "remaining_time": "21:23:06"}
+{"current_steps": 850, "total_steps": 5424, "loss": 0.485, "lr": 9.90270615534451e-06, "epoch": 0.4700460829493088, "percentage": 15.67, "elapsed_time": "3:57:52", "remaining_time": "21:20:00"}
+{"current_steps": 860, "total_steps": 5424, "loss": 0.4791, "lr": 9.89628692277771e-06, "epoch": 0.47557603686635946, "percentage": 15.86, "elapsed_time": "4:00:38", "remaining_time": "21:17:04"}
+{"current_steps": 870, "total_steps": 5424, "loss": 0.4749, "lr": 9.889664853065315e-06, "epoch": 0.48110599078341015, "percentage": 16.04, "elapsed_time": "4:03:26", "remaining_time": "21:14:18"}
+{"current_steps": 880, "total_steps": 5424, "loss": 0.4847, "lr": 9.882840220538002e-06, "epoch": 0.48663594470046084, "percentage": 16.22, "elapsed_time": "4:06:12", "remaining_time": "21:11:21"}
+{"current_steps": 890, "total_steps": 5424, "loss": 0.4823, "lr": 9.875813307917966e-06, "epoch": 0.49216589861751153, "percentage": 16.41, "elapsed_time": "4:08:57", "remaining_time": "21:08:17"}
+{"current_steps": 900, "total_steps": 5424, "loss": 0.4791, "lr": 9.8685844063072e-06, "epoch": 0.4976958525345622, "percentage": 16.59, "elapsed_time": "4:11:45", "remaining_time": "21:05:29"}
+{"current_steps": 910, "total_steps": 5424, "loss": 0.48, "lr": 9.86115381517545e-06, "epoch": 0.5032258064516129, "percentage": 16.78, "elapsed_time": "4:14:34", "remaining_time": "21:02:50"}
+{"current_steps": 920, "total_steps": 5424, "loss": 0.4821, "lr": 9.853521842347787e-06, "epoch": 0.5087557603686635, "percentage": 16.96, "elapsed_time": "4:17:25", "remaining_time": "21:00:18"}
+{"current_steps": 930, "total_steps": 5424, "loss": 0.4778, "lr": 9.845688803991873e-06, "epoch": 0.5142857142857142, "percentage": 17.15, "elapsed_time": "4:20:12", "remaining_time": "20:57:21"}
+{"current_steps": 940, "total_steps": 5424, "loss": 0.4771, "lr": 9.837655024604856e-06, "epoch": 0.5198156682027649, "percentage": 17.33, "elapsed_time": "4:22:55", "remaining_time": "20:54:11"}
+{"current_steps": 950, "total_steps": 5424, "loss": 0.4711, "lr": 9.82942083699993e-06, "epoch": 0.5253456221198156, "percentage": 17.51, "elapsed_time": "4:25:40", "remaining_time": "20:51:11"}
+{"current_steps": 960, "total_steps": 5424, "loss": 0.4773, "lr": 9.82098658229254e-06, "epoch": 0.5308755760368664, "percentage": 17.7, "elapsed_time": "4:28:25", "remaining_time": "20:48:10"}
+{"current_steps": 970, "total_steps": 5424, "loss": 0.4633, "lr": 9.812352609886261e-06, "epoch": 0.5364055299539171, "percentage": 17.88, "elapsed_time": "4:31:09", "remaining_time": "20:45:03"}
+{"current_steps": 980, "total_steps": 5424, "loss": 0.4786, "lr": 9.803519277458323e-06, "epoch": 0.5419354838709678, "percentage": 18.07, "elapsed_time": "4:33:58", "remaining_time": "20:42:22"}
+{"current_steps": 990, "total_steps": 5424, "loss": 0.4706, "lr": 9.794486950944775e-06, "epoch": 0.5474654377880185, "percentage": 18.25, "elapsed_time": "4:36:46", "remaining_time": "20:39:35"}
+{"current_steps": 1000, "total_steps": 5424, "loss": 0.472, "lr": 9.785256004525357e-06, "epoch": 0.5529953917050692, "percentage": 18.44, "elapsed_time": "4:39:34", "remaining_time": "20:36:48"}
+{"current_steps": 1010, "total_steps": 5424, "loss": 0.4684, "lr": 9.775826820607967e-06, "epoch": 0.5585253456221199, "percentage": 18.62, "elapsed_time": "4:42:25", "remaining_time": "20:34:17"}
+{"current_steps": 1020, "total_steps": 5424, "loss": 0.4705, "lr": 9.766199789812845e-06, "epoch": 0.5640552995391706, "percentage": 18.81, "elapsed_time": "4:45:13", "remaining_time": "20:31:30"}
+{"current_steps": 1030, "total_steps": 5424, "loss": 0.4759, "lr": 9.756375310956375e-06, "epoch": 0.5695852534562212, "percentage": 18.99, "elapsed_time": "4:48:06", "remaining_time": "20:29:02"}
+{"current_steps": 1040, "total_steps": 5424, "loss": 0.468, "lr": 9.746353791034566e-06, "epoch": 0.5751152073732719, "percentage": 19.17, "elapsed_time": "4:50:46", "remaining_time": "20:25:43"}
+{"current_steps": 1050, "total_steps": 5424, "loss": 0.4718, "lr": 9.736135645206198e-06, "epoch": 0.5806451612903226, "percentage": 19.36, "elapsed_time": "4:53:34", "remaining_time": "20:22:57"}
+{"current_steps": 1060, "total_steps": 5424, "loss": 0.4727, "lr": 9.725721296775616e-06, "epoch": 0.5861751152073733, "percentage": 19.54, "elapsed_time": "4:56:22", "remaining_time": "20:20:12"}
+{"current_steps": 1070, "total_steps": 5424, "loss": 0.4697, "lr": 9.7151111771752e-06, "epoch": 0.591705069124424, "percentage": 19.73, "elapsed_time": "4:59:08", "remaining_time": "20:17:14"}
+{"current_steps": 1080, "total_steps": 5424, "loss": 0.4698, "lr": 9.704305725947483e-06, "epoch": 0.5972350230414747, "percentage": 19.91, "elapsed_time": "5:01:57", "remaining_time": "20:14:30"}
+{"current_steps": 1090, "total_steps": 5424, "loss": 0.4685, "lr": 9.693305390726952e-06, "epoch": 0.6027649769585254, "percentage": 20.1, "elapsed_time": "5:04:53", "remaining_time": "20:12:15"}
+{"current_steps": 1100, "total_steps": 5424, "loss": 0.4654, "lr": 9.682110627221503e-06, "epoch": 0.6082949308755761, "percentage": 20.28, "elapsed_time": "5:07:33", "remaining_time": "20:09:00"}
+{"current_steps": 1110, "total_steps": 5424, "loss": 0.4769, "lr": 9.670721899193556e-06, "epoch": 0.6138248847926268, "percentage": 20.46, "elapsed_time": "5:10:17", "remaining_time": "20:05:58"}
+{"current_steps": 1120, "total_steps": 5424, "loss": 0.4684, "lr": 9.659139678440845e-06, "epoch": 0.6193548387096774, "percentage": 20.65, "elapsed_time": "5:13:01", "remaining_time": "20:02:54"}
+{"current_steps": 1130, "total_steps": 5424, "loss": 0.478, "lr": 9.647364444776877e-06, "epoch": 0.6248847926267281, "percentage": 20.83, "elapsed_time": "5:15:43", "remaining_time": "19:59:43"}
+{"current_steps": 1140, "total_steps": 5424, "loss": 0.4731, "lr": 9.635396686011052e-06, "epoch": 0.6304147465437788, "percentage": 21.02, "elapsed_time": "5:18:34", "remaining_time": "19:57:09"}
+{"current_steps": 1150, "total_steps": 5424, "loss": 0.4685, "lr": 9.623236897928456e-06, "epoch": 0.6359447004608295, "percentage": 21.2, "elapsed_time": "5:21:15", "remaining_time": "19:53:58"}
+{"current_steps": 1160, "total_steps": 5424, "loss": 0.4659, "lr": 9.61088558426932e-06, "epoch": 0.6414746543778802, "percentage": 21.39, "elapsed_time": "5:24:01", "remaining_time": "19:51:04"}
+{"current_steps": 1170, "total_steps": 5424, "loss": 0.4709, "lr": 9.59834325670815e-06, "epoch": 0.6470046082949309, "percentage": 21.57, "elapsed_time": "5:26:51", "remaining_time": "19:48:23"}
+{"current_steps": 1180, "total_steps": 5424, "loss": 0.4751, "lr": 9.58561043483254e-06, "epoch": 0.6525345622119816, "percentage": 21.76, "elapsed_time": "5:29:40", "remaining_time": "19:45:43"}
+{"current_steps": 1190, "total_steps": 5424, "loss": 0.4732, "lr": 9.572687646121632e-06, "epoch": 0.6580645161290323, "percentage": 21.94, "elapsed_time": "5:32:25", "remaining_time": "19:42:44"}
+{"current_steps": 1200, "total_steps": 5424, "loss": 0.4665, "lr": 9.559575425924279e-06, "epoch": 0.663594470046083, "percentage": 22.12, "elapsed_time": "5:35:08", "remaining_time": "19:39:41"}
+{"current_steps": 1210, "total_steps": 5424, "loss": 0.4608, "lr": 9.546274317436858e-06, "epoch": 0.6691244239631337, "percentage": 22.31, "elapsed_time": "5:37:56", "remaining_time": "19:36:54"}
+{"current_steps": 1220, "total_steps": 5424, "loss": 0.4759, "lr": 9.53278487168077e-06, "epoch": 0.6746543778801843, "percentage": 22.49, "elapsed_time": "5:40:44", "remaining_time": "19:34:10"}
+{"current_steps": 1230, "total_steps": 5424, "loss": 0.4662, "lr": 9.519107647479609e-06, "epoch": 0.680184331797235, "percentage": 22.68, "elapsed_time": "5:43:35", "remaining_time": "19:31:32"}
+{"current_steps": 1240, "total_steps": 5424, "loss": 0.4594, "lr": 9.505243211436023e-06, "epoch": 0.6857142857142857, "percentage": 22.86, "elapsed_time": "5:46:20", "remaining_time": "19:28:35"}
+{"current_steps": 1250, "total_steps": 5424, "loss": 0.4715, "lr": 9.49119213790823e-06, "epoch": 0.6912442396313364, "percentage": 23.05, "elapsed_time": "5:49:02", "remaining_time": "19:25:31"}
+{"current_steps": 1260, "total_steps": 5424, "loss": 0.4683, "lr": 9.476955008986228e-06, "epoch": 0.6967741935483871, "percentage": 23.23, "elapsed_time": "5:51:45", "remaining_time": "19:22:27"}
+{"current_steps": 1270, "total_steps": 5424, "loss": 0.4633, "lr": 9.46253241446768e-06, "epoch": 0.7023041474654378, "percentage": 23.41, "elapsed_time": "5:54:35", "remaining_time": "19:19:50"}
+{"current_steps": 1280, "total_steps": 5424, "loss": 0.4656, "lr": 9.447924951833483e-06, "epoch": 0.7078341013824885, "percentage": 23.6, "elapsed_time": "5:57:23", "remaining_time": "19:17:03"}
+{"current_steps": 1290, "total_steps": 5424, "loss": 0.4768, "lr": 9.433133226223018e-06, "epoch": 0.7133640552995392, "percentage": 23.78, "elapsed_time": "6:00:06", "remaining_time": "19:14:02"}
+{"current_steps": 1300, "total_steps": 5424, "loss": 0.4669, "lr": 9.418157850409075e-06, "epoch": 0.7188940092165899, "percentage": 23.97, "elapsed_time": "6:02:57", "remaining_time": "19:11:25"}
+{"current_steps": 1310, "total_steps": 5424, "loss": 0.4765, "lr": 9.40299944477247e-06, "epoch": 0.7244239631336405, "percentage": 24.15, "elapsed_time": "6:05:38", "remaining_time": "19:08:17"}
+{"current_steps": 1320, "total_steps": 5424, "loss": 0.4655, "lr": 9.387658637276348e-06, "epoch": 0.7299539170506912, "percentage": 24.34, "elapsed_time": "6:08:33", "remaining_time": "19:05:51"}
+{"current_steps": 1330, "total_steps": 5424, "loss": 0.4669, "lr": 9.372136063440165e-06, "epoch": 0.7354838709677419, "percentage": 24.52, "elapsed_time": "6:11:15", "remaining_time": "19:02:48"}
+{"current_steps": 1340, "total_steps": 5424, "loss": 0.4601, "lr": 9.356432366313362e-06, "epoch": 0.7410138248847926, "percentage": 24.71, "elapsed_time": "6:14:01", "remaining_time": "18:59:55"}
+{"current_steps": 1350, "total_steps": 5424, "loss": 0.4762, "lr": 9.340548196448729e-06, "epoch": 0.7465437788018433, "percentage": 24.89, "elapsed_time": "6:16:46", "remaining_time": "18:57:00"}
+{"current_steps": 1360, "total_steps": 5424, "loss": 0.4639, "lr": 9.324484211875442e-06, "epoch": 0.752073732718894, "percentage": 25.07, "elapsed_time": "6:19:33", "remaining_time": "18:54:12"}
+{"current_steps": 1370, "total_steps": 5424, "loss": 0.4654, "lr": 9.30824107807182e-06, "epoch": 0.7576036866359447, "percentage": 25.26, "elapsed_time": "6:22:19", "remaining_time": "18:51:20"}
+{"current_steps": 1380, "total_steps": 5424, "loss": 0.4611, "lr": 9.291819467937746e-06, "epoch": 0.7631336405529954, "percentage": 25.44, "elapsed_time": "6:25:03", "remaining_time": "18:48:22"}
+{"current_steps": 1390, "total_steps": 5424, "loss": 0.4619, "lr": 9.275220061766793e-06, "epoch": 0.7686635944700461, "percentage": 25.63, "elapsed_time": "6:27:48", "remaining_time": "18:45:29"}
+{"current_steps": 1400, "total_steps": 5424, "loss": 0.4601, "lr": 9.258443547218041e-06, "epoch": 0.7741935483870968, "percentage": 25.81, "elapsed_time": "6:30:35", "remaining_time": "18:42:40"}
+{"current_steps": 1410, "total_steps": 5424, "loss": 0.4671, "lr": 9.241490619287593e-06, "epoch": 0.7797235023041474, "percentage": 26.0, "elapsed_time": "6:33:22", "remaining_time": "18:39:51"}
+{"current_steps": 1420, "total_steps": 5424, "loss": 0.4631, "lr": 9.224361980279779e-06, "epoch": 0.7852534562211981, "percentage": 26.18, "elapsed_time": "6:36:14", "remaining_time": "18:37:16"}
+{"current_steps": 1430, "total_steps": 5424, "loss": 0.4681, "lr": 9.207058339778065e-06, "epoch": 0.7907834101382488, "percentage": 26.36, "elapsed_time": "6:38:59", "remaining_time": "18:34:22"}
+{"current_steps": 1440, "total_steps": 5424, "loss": 0.457, "lr": 9.189580414615658e-06, "epoch": 0.7963133640552995, "percentage": 26.55, "elapsed_time": "6:41:52", "remaining_time": "18:31:50"}
+{"current_steps": 1450, "total_steps": 5424, "loss": 0.4541, "lr": 9.171928928845802e-06, "epoch": 0.8018433179723502, "percentage": 26.73, "elapsed_time": "6:44:41", "remaining_time": "18:29:07"}
+{"current_steps": 1460, "total_steps": 5424, "loss": 0.4575, "lr": 9.154104613711798e-06, "epoch": 0.8073732718894009, "percentage": 26.92, "elapsed_time": "6:47:29", "remaining_time": "18:26:22"}
+{"current_steps": 1470, "total_steps": 5424, "loss": 0.4695, "lr": 9.136108207616694e-06, "epoch": 0.8129032258064516, "percentage": 27.1, "elapsed_time": "6:50:20", "remaining_time": "18:23:43"}
+{"current_steps": 1480, "total_steps": 5424, "loss": 0.4659, "lr": 9.117940456092706e-06, "epoch": 0.8184331797235023, "percentage": 27.29, "elapsed_time": "6:53:08", "remaining_time": "18:20:58"}
+{"current_steps": 1490, "total_steps": 5424, "loss": 0.458, "lr": 9.099602111770336e-06, "epoch": 0.823963133640553, "percentage": 27.47, "elapsed_time": "6:55:56", "remaining_time": "18:18:11"}
+{"current_steps": 1500, "total_steps": 5424, "loss": 0.4646, "lr": 9.081093934347178e-06, "epoch": 0.8294930875576036, "percentage": 27.65, "elapsed_time": "6:58:43", "remaining_time": "18:15:23"}
+{"current_steps": 1510, "total_steps": 5424, "loss": 0.4602, "lr": 9.062416690556463e-06, "epoch": 0.8350230414746543, "percentage": 27.84, "elapsed_time": "7:01:38", "remaining_time": "18:12:54"}
+{"current_steps": 1520, "total_steps": 5424, "loss": 0.4543, "lr": 9.043571154135285e-06, "epoch": 0.840552995391705, "percentage": 28.02, "elapsed_time": "7:04:24", "remaining_time": "18:10:03"}
+{"current_steps": 1530, "total_steps": 5424, "loss": 0.4669, "lr": 9.02455810579255e-06, "epoch": 0.8460829493087557, "percentage": 28.21, "elapsed_time": "7:07:10", "remaining_time": "18:07:10"}
+{"current_steps": 1540, "total_steps": 5424, "loss": 0.4616, "lr": 9.005378333176637e-06, "epoch": 0.8516129032258064, "percentage": 28.39, "elapsed_time": "7:10:06", "remaining_time": "18:04:46"}
+{"current_steps": 1550, "total_steps": 5424, "loss": 0.4623, "lr": 8.986032630842767e-06, "epoch": 0.8571428571428571, "percentage": 28.58, "elapsed_time": "7:12:56", "remaining_time": "18:02:04"}
+{"current_steps": 1560, "total_steps": 5424, "loss": 0.4629, "lr": 8.966521800220084e-06, "epoch": 0.8626728110599078, "percentage": 28.76, "elapsed_time": "7:15:52", "remaining_time": "17:59:38"}
+{"current_steps": 1570, "total_steps": 5424, "loss": 0.4606, "lr": 8.946846649578457e-06, "epoch": 0.8682027649769585, "percentage": 28.95, "elapsed_time": "7:18:41", "remaining_time": "17:56:53"}
+{"current_steps": 1580, "total_steps": 5424, "loss": 0.4562, "lr": 8.927007993994997e-06, "epoch": 0.8737327188940092, "percentage": 29.13, "elapsed_time": "7:21:29", "remaining_time": "17:54:05"}
+{"current_steps": 1590, "total_steps": 5424, "loss": 0.4558, "lr": 8.907006655320287e-06, "epoch": 0.8792626728110599, "percentage": 29.31, "elapsed_time": "7:24:16", "remaining_time": "17:51:17"}
+{"current_steps": 1600, "total_steps": 5424, "loss": 0.4605, "lr": 8.886843462144343e-06, "epoch": 0.8847926267281107, "percentage": 29.5, "elapsed_time": "7:27:05", "remaining_time": "17:48:31"}
+{"current_steps": 1610, "total_steps": 5424, "loss": 0.464, "lr": 8.866519249762275e-06, "epoch": 0.8903225806451613, "percentage": 29.68, "elapsed_time": "7:29:56", "remaining_time": "17:45:52"}
+{"current_steps": 1620, "total_steps": 5424, "loss": 0.465, "lr": 8.846034860139706e-06, "epoch": 0.895852534562212, "percentage": 29.87, "elapsed_time": "7:32:46", "remaining_time": "17:43:11"}
+{"current_steps": 1630, "total_steps": 5424, "loss": 0.4545, "lr": 8.82539114187786e-06, "epoch": 0.9013824884792627, "percentage": 30.05, "elapsed_time": "7:35:34", "remaining_time": "17:40:23"}
+{"current_steps": 1640, "total_steps": 5424, "loss": 0.4592, "lr": 8.804588950178439e-06, "epoch": 0.9069124423963134, "percentage": 30.24, "elapsed_time": "7:38:24", "remaining_time": "17:37:41"}
+{"current_steps": 1650, "total_steps": 5424, "loss": 0.4623, "lr": 8.783629146808175e-06, "epoch": 0.9124423963133641, "percentage": 30.42, "elapsed_time": "7:41:11", "remaining_time": "17:34:52"}
+{"current_steps": 1660, "total_steps": 5424, "loss": 0.4597, "lr": 8.762512600063136e-06, "epoch": 0.9179723502304148, "percentage": 30.6, "elapsed_time": "7:44:03", "remaining_time": "17:32:13"}
+{"current_steps": 1670, "total_steps": 5424, "loss": 0.4538, "lr": 8.74124018473276e-06, "epoch": 0.9235023041474655, "percentage": 30.79, "elapsed_time": "7:46:51", "remaining_time": "17:29:26"}
+{"current_steps": 1680, "total_steps": 5424, "loss": 0.4547, "lr": 8.719812782063603e-06, "epoch": 0.9290322580645162, "percentage": 30.97, "elapsed_time": "7:49:31", "remaining_time": "17:26:21"}
+{"current_steps": 1690, "total_steps": 5424, "loss": 0.462, "lr": 8.698231279722845e-06, "epoch": 0.9345622119815669, "percentage": 31.16, "elapsed_time": "7:52:18", "remaining_time": "17:23:32"}
+{"current_steps": 1700, "total_steps": 5424, "loss": 0.4513, "lr": 8.676496571761507e-06, "epoch": 0.9400921658986175, "percentage": 31.34, "elapsed_time": "7:55:09", "remaining_time": "17:20:53"}
+{"current_steps": 1710, "total_steps": 5424, "loss": 0.4574, "lr": 8.65460955857742e-06, "epoch": 0.9456221198156682, "percentage": 31.53, "elapsed_time": "7:57:59", "remaining_time": "17:18:10"}
+{"current_steps": 1720, "total_steps": 5424, "loss": 0.4587, "lr": 8.632571146877924e-06, "epoch": 0.9511520737327189, "percentage": 31.71, "elapsed_time": "8:00:44", "remaining_time": "17:15:17"}
+{"current_steps": 1730, "total_steps": 5424, "loss": 0.4614, "lr": 8.6103822496423e-06, "epoch": 0.9566820276497696, "percentage": 31.9, "elapsed_time": "8:03:36", "remaining_time": "17:12:38"}
+{"current_steps": 1740, "total_steps": 5424, "loss": 0.4561, "lr": 8.588043786083952e-06, "epoch": 0.9622119815668203, "percentage": 32.08, "elapsed_time": "8:06:28", "remaining_time": "17:10:00"}
+{"current_steps": 1750, "total_steps": 5424, "loss": 0.4521, "lr": 8.565556681612335e-06, "epoch": 0.967741935483871, "percentage": 32.26, "elapsed_time": "8:09:16", "remaining_time": "17:07:11"}
+{"current_steps": 1760, "total_steps": 5424, "loss": 0.4534, "lr": 8.542921867794597e-06, "epoch": 0.9732718894009217, "percentage": 32.45, "elapsed_time": "8:12:11", "remaining_time": "17:04:39"}
+{"current_steps": 1770, "total_steps": 5424, "loss": 0.4561, "lr": 8.520140282317018e-06, "epoch": 0.9788018433179724, "percentage": 32.63, "elapsed_time": "8:14:58", "remaining_time": "17:01:49"}
+{"current_steps": 1780, "total_steps": 5424, "loss": 0.4496, "lr": 8.497212868946132e-06, "epoch": 0.9843317972350231, "percentage": 32.82, "elapsed_time": "8:17:42", "remaining_time": "16:58:54"}
+{"current_steps": 1790, "total_steps": 5424, "loss": 0.4512, "lr": 8.474140577489652e-06, "epoch": 0.9898617511520738, "percentage": 33.0, "elapsed_time": "8:20:26", "remaining_time": "16:55:59"}
+{"current_steps": 1800, "total_steps": 5424, "loss": 0.447, "lr": 8.45092436375712e-06, "epoch": 0.9953917050691244, "percentage": 33.19, "elapsed_time": "8:23:19", "remaining_time": "16:53:21"}
+{"current_steps": 1810, "total_steps": 5424, "loss": 0.4858, "lr": 8.4275651895203e-06, "epoch": 1.0009216589861751, "percentage": 33.37, "elapsed_time": "8:27:12", "remaining_time": "16:52:43"}
+{"current_steps": 1820, "total_steps": 5424, "loss": 0.433, "lr": 8.404064022473344e-06, "epoch": 1.0064516129032257, "percentage": 33.55, "elapsed_time": "8:29:59", "remaining_time": "16:49:53"}
+{"current_steps": 1830, "total_steps": 5424, "loss": 0.4299, "lr": 8.380421836192705e-06, "epoch": 1.0119815668202765, "percentage": 33.74, "elapsed_time": "8:32:45", "remaining_time": "16:47:01"}
+{"current_steps": 1840, "total_steps": 5424, "loss": 0.4372, "lr": 8.356639610096799e-06, "epoch": 1.017511520737327, "percentage": 33.92, "elapsed_time": "8:35:34", "remaining_time": "16:44:15"}
+{"current_steps": 1850, "total_steps": 5424, "loss": 0.4153, "lr": 8.33271832940543e-06, "epoch": 1.023041474654378, "percentage": 34.11, "elapsed_time": "8:38:21", "remaining_time": "16:41:24"}
+{"current_steps": 1860, "total_steps": 5424, "loss": 0.4365, "lr": 8.308658985098983e-06, "epoch": 1.0285714285714285, "percentage": 34.29, "elapsed_time": "8:41:09", "remaining_time": "16:38:36"}
+{"current_steps": 1870, "total_steps": 5424, "loss": 0.4414, "lr": 8.284462573877367e-06, "epoch": 1.0341013824884793, "percentage": 34.48, "elapsed_time": "8:43:57", "remaining_time": "16:35:48"}
+{"current_steps": 1880, "total_steps": 5424, "loss": 0.4373, "lr": 8.260130098118724e-06, "epoch": 1.0396313364055298, "percentage": 34.66, "elapsed_time": "8:46:49", "remaining_time": "16:33:07"}
+{"current_steps": 1890, "total_steps": 5424, "loss": 0.446, "lr": 8.235662565837901e-06, "epoch": 1.0451612903225806, "percentage": 34.85, "elapsed_time": "8:49:40", "remaining_time": "16:30:24"}
+{"current_steps": 1900, "total_steps": 5424, "loss": 0.4293, "lr": 8.211060990644699e-06, "epoch": 1.0506912442396312, "percentage": 35.03, "elapsed_time": "8:52:23", "remaining_time": "16:27:27"}
+{"current_steps": 1910, "total_steps": 5424, "loss": 0.4306, "lr": 8.18632639170188e-06, "epoch": 1.056221198156682, "percentage": 35.21, "elapsed_time": "8:55:11", "remaining_time": "16:24:38"}
+{"current_steps": 1920, "total_steps": 5424, "loss": 0.4338, "lr": 8.161459793682937e-06, "epoch": 1.0617511520737328, "percentage": 35.4, "elapsed_time": "8:57:57", "remaining_time": "16:21:45"}
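Each record in trainer_log.jsonl carries the same fields: current_steps, total_steps, loss, lr, epoch, percentage, elapsed_time, and remaining_time. Over the span shown here, the loss falls from roughly 0.53 at step 220 to around 0.43 by step 1920, while the learning rate warms up to about 1e-5 near step 550 and then decays slowly. As a minimal sketch (not part of this commit; the local file name and the use of matplotlib are assumptions), the log could be loaded and plotted like this:

# Minimal sketch, not part of this repository: parse a locally downloaded
# trainer_log.jsonl and plot loss and learning rate over training steps.
import json
import matplotlib.pyplot as plt

steps, losses, lrs = [], [], []
with open("trainer_log.jsonl") as f:
    for line in f:
        line = line.strip()
        if not line:
            continue
        rec = json.loads(line)
        if "loss" not in rec:  # skip any summary records without a loss field
            continue
        steps.append(rec["current_steps"])
        losses.append(rec["loss"])
        lrs.append(rec["lr"])

fig, (ax_loss, ax_lr) = plt.subplots(1, 2, figsize=(10, 4))
ax_loss.plot(steps, losses)
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_xlabel("step")
ax_lr.set_ylabel("learning rate")
fig.tight_layout()
fig.savefig("trainer_log.png")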
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a9d89518bcf7b346f6785c34c599b3dc74473fb0100c864675e86449f6a668f1
+size 7288
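training_args.bin is stored through Git LFS, so the diff above shows only the pointer file: the pointer spec version, the SHA-256 of the actual payload, and its size in bytes. A small sketch (an illustration only, assuming the real binary has already been fetched, e.g. with git lfs pull or the Hub client) that checks a local copy against those pointer values:

# Illustration only: verify a downloaded training_args.bin against the
# sha256 and size recorded in the LFS pointer above.
import hashlib
import os

EXPECTED_SHA256 = "a9d89518bcf7b346f6785c34c599b3dc74473fb0100c864675e86449f6a668f1"
EXPECTED_SIZE = 7288  # bytes, from the pointer file

path = "training_args.bin"
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert digest.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("training_args.bin matches its LFS pointer")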
vocab.json
ADDED
The diff for this file is too large to render.
See raw diff