Training in progress, epoch 1

Files changed:

- .gitattributes +1 -0
- added_tokens.json +24 -0
- config.json +29 -0
- merges.txt +0 -0
- model-00001-of-00004.safetensors +3 -0
- model-00002-of-00004.safetensors +3 -0
- model-00003-of-00004.safetensors +3 -0
- model-00004-of-00004.safetensors +3 -0
- model.safetensors.index.json +346 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +208 -0
- trainer_log.jsonl +127 -0
- training_args.bin +3 -0
- vocab.json +0 -0
.gitattributes CHANGED

@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED

@@ -0,0 +1,24 @@
{
  "</tool_call>": 151658,
  "<tool_call>": 151657,
  "<|box_end|>": 151649,
  "<|box_start|>": 151648,
  "<|endoftext|>": 151643,
  "<|file_sep|>": 151664,
  "<|fim_middle|>": 151660,
  "<|fim_pad|>": 151662,
  "<|fim_prefix|>": 151659,
  "<|fim_suffix|>": 151661,
  "<|im_end|>": 151645,
  "<|im_start|>": 151644,
  "<|image_pad|>": 151655,
  "<|object_ref_end|>": 151647,
  "<|object_ref_start|>": 151646,
  "<|quad_end|>": 151651,
  "<|quad_start|>": 151650,
  "<|repo_name|>": 151663,
  "<|video_pad|>": 151656,
  "<|vision_end|>": 151653,
  "<|vision_pad|>": 151654,
  "<|vision_start|>": 151652
}
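For reference, a minimal sketch (not part of this commit) of how the mapping above can be cross-checked after downloading the checkpoint. The `./checkpoint` path is a placeholder for wherever the files land locally.

```python
# Illustrative only: verify that the IDs in added_tokens.json match what the
# tokenizer actually assigns. Assumes the files in this commit are in ./checkpoint.
import json
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./checkpoint")  # placeholder path

with open("./checkpoint/added_tokens.json") as f:
    added_tokens = json.load(f)

for token, expected_id in added_tokens.items():
    actual_id = tokenizer.convert_tokens_to_ids(token)
    assert actual_id == expected_id, f"{token}: {actual_id} != {expected_id}"

print(f"All {len(added_tokens)} added tokens map to their expected IDs.")
```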
config.json ADDED

@@ -0,0 +1,29 @@
{
  "_name_or_path": "Qwen/Qwen2.5-7B-Instruct",
  "architectures": [
    "Qwen2ForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 151643,
  "eos_token_id": 151645,
  "hidden_act": "silu",
  "hidden_size": 3584,
  "initializer_range": 0.02,
  "intermediate_size": 18944,
  "max_position_embeddings": 32768,
  "max_window_layers": 28,
  "model_type": "qwen2",
  "num_attention_heads": 28,
  "num_hidden_layers": 28,
  "num_key_value_heads": 4,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 1000000.0,
  "sliding_window": null,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.46.1",
  "use_cache": false,
  "use_sliding_window": false,
  "vocab_size": 152064
}
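A small sketch of reading this config and deriving the quantities it implies (per-head size, grouped-query-attention ratio); again `./checkpoint` is an assumed local path, not something in the repo.

```python
# Illustrative only: load the config above and derive head size and GQA groups.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./checkpoint")  # placeholder path

head_dim = config.hidden_size // config.num_attention_heads            # 3584 / 28 = 128
gqa_groups = config.num_attention_heads // config.num_key_value_heads  # 28 / 4 = 7
print(config.model_type, head_dim, gqa_groups)                          # qwen2 128 7
```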
merges.txt ADDED

The diff for this file is too large to render. See raw diff.
model-00001-of-00004.safetensors ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b40baa79e355f518385f2f71be7e6a9951642e8e39a1d7b72e0b0658dac4e50c
size 4877660776
model-00002-of-00004.safetensors ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:26a0a2f1ea5ba7a783d9838fb0c53d4480e3b1b6d136633ebf783f041c61e0e4
size 4932751008
model-00003-of-00004.safetensors ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:796cc19851ee8a9ea39b107e7004ca271d3ba982ffe54a9a5b224919885caf33
size 4330865200
model-00004-of-00004.safetensors ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:00bed98febbca8d425f5042b8c97f35251780f38a86a3266fb5d0348efbb611d
size 1089994880
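The four entries above are Git LFS pointer files, not the weights themselves. A minimal sketch (an assumption about local usage, not part of the commit) of parsing a pointer and checking a downloaded shard against its recorded size and sha256:

```python
# Illustrative only: verify a downloaded shard against its LFS pointer.
import hashlib
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    """Parse the 'key value' lines of a git-lfs v1 pointer file."""
    fields = dict(line.split(" ", 1) for line in Path(path).read_text().splitlines())
    return {"oid": fields["oid"].removeprefix("sha256:"), "size": int(fields["size"])}

def verify_shard(pointer_path: str, blob_path: str) -> bool:
    meta = parse_lfs_pointer(pointer_path)
    blob = Path(blob_path)
    if blob.stat().st_size != meta["size"]:
        return False
    h = hashlib.sha256()
    with blob.open("rb") as f:                      # hash in chunks; shards are ~5 GB
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == meta["oid"]
```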
model.safetensors.index.json ADDED

@@ -0,0 +1,346 @@
{
  "metadata": {
    "total_size": 15231233024
  },
  "weight_map": {
    "lm_head.weight": "model-00004-of-00004.safetensors",
    "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.norm.weight": "model-00003-of-00004.safetensors"
  }
}
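The index maps each tensor name to the shard that stores it. A minimal sketch (assuming a local `./checkpoint` download) of using the map to load a single tensor without touching the other shards:

```python
# Illustrative only: resolve one weight through the index and read just that
# tensor with the safetensors library.
import json
from safetensors import safe_open

with open("./checkpoint/model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.18.mlp.down_proj.weight"
shard = index["weight_map"][name]          # -> "model-00003-of-00004.safetensors"

with safe_open(f"./checkpoint/{shard}", framework="pt") as f:
    tensor = f.get_tensor(name)            # only this tensor is read from disk

print(shard, tuple(tensor.shape), tensor.dtype)  # expect (3584, 18944), torch.bfloat16
```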
special_tokens_map.json ADDED

@@ -0,0 +1,31 @@
{
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
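A short sketch of how this map surfaces through the tokenizer. Note that the tokenizer's eos and pad are both "<|endoftext|>" (ID 151643) here, while config.json sets eos_token_id to 151645 ("<|im_end|>") for generation; `./checkpoint` remains a placeholder path.

```python
# Illustrative only: inspect the special tokens declared above.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # placeholder path
print(tok.eos_token, tok.eos_token_id)    # <|endoftext|> 151643
print(tok.pad_token, tok.pad_token_id)    # <|endoftext|> 151643
print(tok.additional_special_tokens[:3])  # ['<|im_start|>', '<|im_end|>', '<|object_ref_start|>']
```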
tokenizer.json ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
size 11421896
tokenizer_config.json ADDED

@@ -0,0 +1,208 @@
{
  "add_bos_token": false,
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151645": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151646": {
      "content": "<|object_ref_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151647": {
      "content": "<|object_ref_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151648": {
      "content": "<|box_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151649": {
      "content": "<|box_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151650": {
      "content": "<|quad_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151651": {
      "content": "<|quad_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151652": {
      "content": "<|vision_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151653": {
      "content": "<|vision_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151654": {
      "content": "<|vision_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151655": {
      "content": "<|image_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151656": {
      "content": "<|video_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151657": {
      "content": "<tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151658": {
      "content": "</tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151659": {
      "content": "<|fim_prefix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151660": {
      "content": "<|fim_middle|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151661": {
      "content": "<|fim_suffix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151662": {
      "content": "<|fim_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151663": {
      "content": "<|repo_name|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151664": {
      "content": "<|file_sep|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    }
  },
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "bos_token": null,
  "chat_template": "{%- if tools %}\n    {{- '<|im_start|>system\\n' }}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- messages[0]['content'] }}\n    {%- else %}\n        {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n    {%- endif %}\n    {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n    {%- for tool in tools %}\n        {{- \"\\n\" }}\n        {{- tool | tojson }}\n    {%- endfor %}\n    {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n    {%- else %}\n        {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n    {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n    {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n        {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n    {%- elif message.role == \"assistant\" %}\n        {{- '<|im_start|>' + message.role }}\n        {%- if message.content %}\n            {{- '\\n' + message.content }}\n        {%- endif %}\n        {%- for tool_call in message.tool_calls %}\n            {%- if tool_call.function is defined %}\n                {%- set tool_call = tool_call.function %}\n            {%- endif %}\n            {{- '\\n<tool_call>\\n{\"name\": \"' }}\n            {{- tool_call.name }}\n            {{- '\", \"arguments\": ' }}\n            {{- tool_call.arguments | tojson }}\n            {{- '}\\n</tool_call>' }}\n        {%- endfor %}\n        {{- '<|im_end|>\\n' }}\n    {%- elif message.role == \"tool\" %}\n        {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n            {{- '<|im_start|>user' }}\n        {%- endif %}\n        {{- '\\n<tool_response>\\n' }}\n        {{- message.content }}\n        {{- '\\n</tool_response>' }}\n        {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n            {{- '<|im_end|>\\n' }}\n        {%- endif %}\n    {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n    {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|endoftext|>",
  "errors": "replace",
  "model_max_length": 131072,
  "pad_token": "<|endoftext|>",
  "padding_side": "right",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null
}
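The `chat_template` field above is the Qwen2.5 ChatML template, so the tokenizer renders conversations into <|im_start|>/<|im_end|> turns. A minimal usage sketch (the `./checkpoint` path is assumed, not part of the commit):

```python
# Illustrative only: render a conversation with the shipped chat template.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # placeholder path

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Hello!<|im_end|>
# <|im_start|>assistant
```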
trainer_log.jsonl ADDED

@@ -0,0 +1,127 @@
{"current_steps": 1, "total_steps": 375, "loss": 1.2947, "lr": 2.6315789473684213e-07, "epoch": 0.008, "percentage": 0.27, "elapsed_time": "0:01:09", "remaining_time": "7:15:47"}
{"current_steps": 2, "total_steps": 375, "loss": 1.2776, "lr": 5.263157894736843e-07, "epoch": 0.016, "percentage": 0.53, "elapsed_time": "0:02:02", "remaining_time": "6:20:58"}
{"current_steps": 3, "total_steps": 375, "loss": 1.272, "lr": 7.894736842105263e-07, "epoch": 0.024, "percentage": 0.8, "elapsed_time": "0:02:52", "remaining_time": "5:57:07"}
{"current_steps": 4, "total_steps": 375, "loss": 1.2738, "lr": 1.0526315789473685e-06, "epoch": 0.032, "percentage": 1.07, "elapsed_time": "0:03:56", "remaining_time": "6:05:46"}
{"current_steps": 5, "total_steps": 375, "loss": 1.2432, "lr": 1.3157894736842106e-06, "epoch": 0.04, "percentage": 1.33, "elapsed_time": "0:04:56", "remaining_time": "6:06:11"}
{"current_steps": 6, "total_steps": 375, "loss": 1.2115, "lr": 1.5789473684210526e-06, "epoch": 0.048, "percentage": 1.6, "elapsed_time": "0:05:53", "remaining_time": "6:01:55"}
{"current_steps": 7, "total_steps": 375, "loss": 1.2493, "lr": 1.8421052631578948e-06, "epoch": 0.056, "percentage": 1.87, "elapsed_time": "0:06:39", "remaining_time": "5:50:27"}
{"current_steps": 8, "total_steps": 375, "loss": 1.2107, "lr": 2.105263157894737e-06, "epoch": 0.064, "percentage": 2.13, "elapsed_time": "0:07:35", "remaining_time": "5:48:08"}
{"current_steps": 9, "total_steps": 375, "loss": 1.1829, "lr": 2.368421052631579e-06, "epoch": 0.072, "percentage": 2.4, "elapsed_time": "0:08:32", "remaining_time": "5:47:24"}
{"current_steps": 10, "total_steps": 375, "loss": 1.1258, "lr": 2.631578947368421e-06, "epoch": 0.08, "percentage": 2.67, "elapsed_time": "0:09:18", "remaining_time": "5:40:02"}
{"current_steps": 11, "total_steps": 375, "loss": 1.1722, "lr": 2.8947368421052634e-06, "epoch": 0.088, "percentage": 2.93, "elapsed_time": "0:10:18", "remaining_time": "5:41:08"}
{"current_steps": 12, "total_steps": 375, "loss": 1.1329, "lr": 3.157894736842105e-06, "epoch": 0.096, "percentage": 3.2, "elapsed_time": "0:11:04", "remaining_time": "5:34:57"}
{"current_steps": 13, "total_steps": 375, "loss": 1.1283, "lr": 3.421052631578948e-06, "epoch": 0.104, "percentage": 3.47, "elapsed_time": "0:11:48", "remaining_time": "5:28:47"}
{"current_steps": 14, "total_steps": 375, "loss": 1.1362, "lr": 3.6842105263157896e-06, "epoch": 0.112, "percentage": 3.73, "elapsed_time": "0:12:48", "remaining_time": "5:30:28"}
{"current_steps": 15, "total_steps": 375, "loss": 1.122, "lr": 3.947368421052632e-06, "epoch": 0.12, "percentage": 4.0, "elapsed_time": "0:13:42", "remaining_time": "5:29:00"}
{"current_steps": 16, "total_steps": 375, "loss": 1.098, "lr": 4.210526315789474e-06, "epoch": 0.128, "percentage": 4.27, "elapsed_time": "0:14:34", "remaining_time": "5:26:54"}
{"current_steps": 17, "total_steps": 375, "loss": 1.0722, "lr": 4.473684210526316e-06, "epoch": 0.136, "percentage": 4.53, "elapsed_time": "0:15:26", "remaining_time": "5:25:08"}
{"current_steps": 18, "total_steps": 375, "loss": 1.0788, "lr": 4.736842105263158e-06, "epoch": 0.144, "percentage": 4.8, "elapsed_time": "0:16:16", "remaining_time": "5:22:39"}
{"current_steps": 19, "total_steps": 375, "loss": 1.0823, "lr": 5e-06, "epoch": 0.152, "percentage": 5.07, "elapsed_time": "0:17:14", "remaining_time": "5:23:03"}
{"current_steps": 20, "total_steps": 375, "loss": 1.0643, "lr": 5.263157894736842e-06, "epoch": 0.16, "percentage": 5.33, "elapsed_time": "0:18:08", "remaining_time": "5:21:52"}
{"current_steps": 21, "total_steps": 375, "loss": 1.0186, "lr": 5.526315789473685e-06, "epoch": 0.168, "percentage": 5.6, "elapsed_time": "0:18:58", "remaining_time": "5:19:56"}
{"current_steps": 22, "total_steps": 375, "loss": 1.0316, "lr": 5.789473684210527e-06, "epoch": 0.176, "percentage": 5.87, "elapsed_time": "0:19:51", "remaining_time": "5:18:34"}
{"current_steps": 23, "total_steps": 375, "loss": 1.0223, "lr": 6.0526315789473685e-06, "epoch": 0.184, "percentage": 6.13, "elapsed_time": "0:20:45", "remaining_time": "5:17:34"}
{"current_steps": 24, "total_steps": 375, "loss": 0.9821, "lr": 6.31578947368421e-06, "epoch": 0.192, "percentage": 6.4, "elapsed_time": "0:21:41", "remaining_time": "5:17:12"}
{"current_steps": 25, "total_steps": 375, "loss": 0.9551, "lr": 6.578947368421054e-06, "epoch": 0.2, "percentage": 6.67, "elapsed_time": "0:22:30", "remaining_time": "5:15:06"}
{"current_steps": 26, "total_steps": 375, "loss": 0.9725, "lr": 6.842105263157896e-06, "epoch": 0.208, "percentage": 6.93, "elapsed_time": "0:23:20", "remaining_time": "5:13:16"}
{"current_steps": 27, "total_steps": 375, "loss": 0.9684, "lr": 7.1052631578947375e-06, "epoch": 0.216, "percentage": 7.2, "elapsed_time": "0:24:14", "remaining_time": "5:12:27"}
{"current_steps": 28, "total_steps": 375, "loss": 0.9963, "lr": 7.368421052631579e-06, "epoch": 0.224, "percentage": 7.47, "elapsed_time": "0:25:15", "remaining_time": "5:13:03"}
{"current_steps": 29, "total_steps": 375, "loss": 1.0004, "lr": 7.631578947368423e-06, "epoch": 0.232, "percentage": 7.73, "elapsed_time": "0:26:12", "remaining_time": "5:12:42"}
{"current_steps": 30, "total_steps": 375, "loss": 0.9658, "lr": 7.894736842105265e-06, "epoch": 0.24, "percentage": 8.0, "elapsed_time": "0:27:04", "remaining_time": "5:11:26"}
{"current_steps": 31, "total_steps": 375, "loss": 0.9879, "lr": 8.157894736842106e-06, "epoch": 0.248, "percentage": 8.27, "elapsed_time": "0:28:04", "remaining_time": "5:11:32"}
{"current_steps": 32, "total_steps": 375, "loss": 0.9472, "lr": 8.421052631578948e-06, "epoch": 0.256, "percentage": 8.53, "elapsed_time": "0:28:57", "remaining_time": "5:10:22"}
{"current_steps": 33, "total_steps": 375, "loss": 0.978, "lr": 8.68421052631579e-06, "epoch": 0.264, "percentage": 8.8, "elapsed_time": "0:29:55", "remaining_time": "5:10:09"}
{"current_steps": 34, "total_steps": 375, "loss": 0.9468, "lr": 8.947368421052632e-06, "epoch": 0.272, "percentage": 9.07, "elapsed_time": "0:30:49", "remaining_time": "5:09:06"}
{"current_steps": 35, "total_steps": 375, "loss": 0.9643, "lr": 9.210526315789474e-06, "epoch": 0.28, "percentage": 9.33, "elapsed_time": "0:31:54", "remaining_time": "5:09:58"}
{"current_steps": 36, "total_steps": 375, "loss": 0.9534, "lr": 9.473684210526315e-06, "epoch": 0.288, "percentage": 9.6, "elapsed_time": "0:32:48", "remaining_time": "5:08:52"}
{"current_steps": 37, "total_steps": 375, "loss": 0.9389, "lr": 9.736842105263159e-06, "epoch": 0.296, "percentage": 9.87, "elapsed_time": "0:33:37", "remaining_time": "5:07:11"}
{"current_steps": 38, "total_steps": 375, "loss": 0.9065, "lr": 1e-05, "epoch": 0.304, "percentage": 10.13, "elapsed_time": "0:34:22", "remaining_time": "5:04:52"}
{"current_steps": 39, "total_steps": 375, "loss": 0.9469, "lr": 9.99978274148479e-06, "epoch": 0.312, "percentage": 10.4, "elapsed_time": "0:35:14", "remaining_time": "5:03:35"}
{"current_steps": 40, "total_steps": 375, "loss": 0.9243, "lr": 9.999130984819662e-06, "epoch": 0.32, "percentage": 10.67, "elapsed_time": "0:36:15", "remaining_time": "5:03:37"}
{"current_steps": 41, "total_steps": 375, "loss": 0.9615, "lr": 9.998044786644492e-06, "epoch": 0.328, "percentage": 10.93, "elapsed_time": "0:37:16", "remaining_time": "5:03:42"}
{"current_steps": 42, "total_steps": 375, "loss": 0.9437, "lr": 9.9965242413536e-06, "epoch": 0.336, "percentage": 11.2, "elapsed_time": "0:38:15", "remaining_time": "5:03:17"}
{"current_steps": 43, "total_steps": 375, "loss": 0.9486, "lr": 9.994569481087552e-06, "epoch": 0.344, "percentage": 11.47, "elapsed_time": "0:39:04", "remaining_time": "5:01:44"}
{"current_steps": 44, "total_steps": 375, "loss": 0.9382, "lr": 9.992180675721671e-06, "epoch": 0.352, "percentage": 11.73, "elapsed_time": "0:40:00", "remaining_time": "5:00:59"}
{"current_steps": 45, "total_steps": 375, "loss": 0.9179, "lr": 9.989358032851283e-06, "epoch": 0.36, "percentage": 12.0, "elapsed_time": "0:40:55", "remaining_time": "5:00:06"}
{"current_steps": 46, "total_steps": 375, "loss": 0.9395, "lr": 9.986101797773667e-06, "epoch": 0.368, "percentage": 12.27, "elapsed_time": "0:41:52", "remaining_time": "4:59:30"}
{"current_steps": 47, "total_steps": 375, "loss": 0.886, "lr": 9.98241225346674e-06, "epoch": 0.376, "percentage": 12.53, "elapsed_time": "0:42:46", "remaining_time": "4:58:30"}
{"current_steps": 48, "total_steps": 375, "loss": 0.8996, "lr": 9.978289720564471e-06, "epoch": 0.384, "percentage": 12.8, "elapsed_time": "0:43:39", "remaining_time": "4:57:27"}
{"current_steps": 49, "total_steps": 375, "loss": 0.9052, "lr": 9.97373455732901e-06, "epoch": 0.392, "percentage": 13.07, "elapsed_time": "0:44:25", "remaining_time": "4:55:36"}
{"current_steps": 50, "total_steps": 375, "loss": 0.9217, "lr": 9.968747159619556e-06, "epoch": 0.4, "percentage": 13.33, "elapsed_time": "0:45:21", "remaining_time": "4:54:47"}
{"current_steps": 51, "total_steps": 375, "loss": 0.9534, "lr": 9.963327960857962e-06, "epoch": 0.408, "percentage": 13.6, "elapsed_time": "0:46:19", "remaining_time": "4:54:17"}
{"current_steps": 52, "total_steps": 375, "loss": 0.904, "lr": 9.957477431991053e-06, "epoch": 0.416, "percentage": 13.87, "elapsed_time": "0:47:13", "remaining_time": "4:53:21"}
{"current_steps": 53, "total_steps": 375, "loss": 0.9093, "lr": 9.95119608144972e-06, "epoch": 0.424, "percentage": 14.13, "elapsed_time": "0:48:10", "remaining_time": "4:52:44"}
{"current_steps": 54, "total_steps": 375, "loss": 0.9236, "lr": 9.944484455104716e-06, "epoch": 0.432, "percentage": 14.4, "elapsed_time": "0:49:05", "remaining_time": "4:51:50"}
{"current_steps": 55, "total_steps": 375, "loss": 0.9295, "lr": 9.937343136219234e-06, "epoch": 0.44, "percentage": 14.67, "elapsed_time": "0:49:53", "remaining_time": "4:50:14"}
{"current_steps": 56, "total_steps": 375, "loss": 0.9171, "lr": 9.929772745398207e-06, "epoch": 0.448, "percentage": 14.93, "elapsed_time": "0:50:46", "remaining_time": "4:49:14"}
{"current_steps": 57, "total_steps": 375, "loss": 0.8842, "lr": 9.921773940534382e-06, "epoch": 0.456, "percentage": 15.2, "elapsed_time": "0:51:39", "remaining_time": "4:48:13"}
{"current_steps": 58, "total_steps": 375, "loss": 0.9079, "lr": 9.913347416751148e-06, "epoch": 0.464, "percentage": 15.47, "elapsed_time": "0:52:24", "remaining_time": "4:46:26"}
{"current_steps": 59, "total_steps": 375, "loss": 0.9433, "lr": 9.904493906342124e-06, "epoch": 0.472, "percentage": 15.73, "elapsed_time": "0:53:16", "remaining_time": "4:45:22"}
{"current_steps": 60, "total_steps": 375, "loss": 0.9086, "lr": 9.895214178707516e-06, "epoch": 0.48, "percentage": 16.0, "elapsed_time": "0:54:10", "remaining_time": "4:44:23"}
{"current_steps": 61, "total_steps": 375, "loss": 0.8941, "lr": 9.885509040287267e-06, "epoch": 0.488, "percentage": 16.27, "elapsed_time": "0:54:55", "remaining_time": "4:42:45"}
{"current_steps": 62, "total_steps": 375, "loss": 0.8547, "lr": 9.875379334490962e-06, "epoch": 0.496, "percentage": 16.53, "elapsed_time": "0:55:39", "remaining_time": "4:41:00"}
{"current_steps": 63, "total_steps": 375, "loss": 0.8925, "lr": 9.864825941624538e-06, "epoch": 0.504, "percentage": 16.8, "elapsed_time": "0:56:25", "remaining_time": "4:39:26"}
|
| 64 |
+
{"current_steps": 64, "total_steps": 375, "loss": 0.9073, "lr": 9.853849778813777e-06, "epoch": 0.512, "percentage": 17.07, "elapsed_time": "0:57:22", "remaining_time": "4:38:46"}
|
| 65 |
+
{"current_steps": 65, "total_steps": 375, "loss": 0.9398, "lr": 9.842451799924616e-06, "epoch": 0.52, "percentage": 17.33, "elapsed_time": "0:58:13", "remaining_time": "4:37:41"}
|
| 66 |
+
{"current_steps": 66, "total_steps": 375, "loss": 0.9149, "lr": 9.830632995480243e-06, "epoch": 0.528, "percentage": 17.6, "elapsed_time": "0:59:05", "remaining_time": "4:36:39"}
|
| 67 |
+
{"current_steps": 67, "total_steps": 375, "loss": 0.8812, "lr": 9.818394392575018e-06, "epoch": 0.536, "percentage": 17.87, "elapsed_time": "1:00:00", "remaining_time": "4:35:53"}
|
| 68 |
+
{"current_steps": 68, "total_steps": 375, "loss": 0.9074, "lr": 9.805737054785223e-06, "epoch": 0.544, "percentage": 18.13, "elapsed_time": "1:01:05", "remaining_time": "4:35:49"}
|
| 69 |
+
{"current_steps": 69, "total_steps": 375, "loss": 0.8983, "lr": 9.792662082076618e-06, "epoch": 0.552, "percentage": 18.4, "elapsed_time": "1:02:00", "remaining_time": "4:35:00"}
|
| 70 |
+
{"current_steps": 70, "total_steps": 375, "loss": 0.95, "lr": 9.779170610708872e-06, "epoch": 0.56, "percentage": 18.67, "elapsed_time": "1:02:54", "remaining_time": "4:34:04"}
|
| 71 |
+
{"current_steps": 71, "total_steps": 375, "loss": 0.8961, "lr": 9.765263813136796e-06, "epoch": 0.568, "percentage": 18.93, "elapsed_time": "1:03:54", "remaining_time": "4:33:36"}
|
| 72 |
+
{"current_steps": 72, "total_steps": 375, "loss": 0.8845, "lr": 9.750942897908468e-06, "epoch": 0.576, "percentage": 19.2, "elapsed_time": "1:04:55", "remaining_time": "4:33:13"}
|
| 73 |
+
{"current_steps": 73, "total_steps": 375, "loss": 0.9402, "lr": 9.736209109560201e-06, "epoch": 0.584, "percentage": 19.47, "elapsed_time": "1:05:53", "remaining_time": "4:32:34"}
|
| 74 |
+
{"current_steps": 74, "total_steps": 375, "loss": 0.8936, "lr": 9.721063728508384e-06, "epoch": 0.592, "percentage": 19.73, "elapsed_time": "1:06:41", "remaining_time": "4:31:16"}
|
| 75 |
+
{"current_steps": 75, "total_steps": 375, "loss": 0.9238, "lr": 9.705508070938219e-06, "epoch": 0.6, "percentage": 20.0, "elapsed_time": "1:07:30", "remaining_time": "4:30:02"}
|
| 76 |
+
{"current_steps": 76, "total_steps": 375, "loss": 0.8917, "lr": 9.689543488689332e-06, "epoch": 0.608, "percentage": 20.27, "elapsed_time": "1:08:27", "remaining_time": "4:29:18"}
|
| 77 |
+
{"current_steps": 77, "total_steps": 375, "loss": 0.9051, "lr": 9.673171369138297e-06, "epoch": 0.616, "percentage": 20.53, "elapsed_time": "1:09:21", "remaining_time": "4:28:26"}
|
| 78 |
+
{"current_steps": 78, "total_steps": 375, "loss": 0.9147, "lr": 9.656393135078067e-06, "epoch": 0.624, "percentage": 20.8, "elapsed_time": "1:10:18", "remaining_time": "4:27:41"}
|
| 79 |
+
{"current_steps": 79, "total_steps": 375, "loss": 0.878, "lr": 9.639210244594335e-06, "epoch": 0.632, "percentage": 21.07, "elapsed_time": "1:11:12", "remaining_time": "4:26:46"}
|
| 80 |
+
{"current_steps": 80, "total_steps": 375, "loss": 0.8919, "lr": 9.621624190938802e-06, "epoch": 0.64, "percentage": 21.33, "elapsed_time": "1:12:05", "remaining_time": "4:25:51"}
|
| 81 |
+
{"current_steps": 81, "total_steps": 375, "loss": 0.8745, "lr": 9.603636502399436e-06, "epoch": 0.648, "percentage": 21.6, "elapsed_time": "1:13:01", "remaining_time": "4:25:01"}
|
| 82 |
+
{"current_steps": 82, "total_steps": 375, "loss": 0.9138, "lr": 9.585248742167638e-06, "epoch": 0.656, "percentage": 21.87, "elapsed_time": "1:14:08", "remaining_time": "4:24:56"}
|
| 83 |
+
{"current_steps": 83, "total_steps": 375, "loss": 0.8924, "lr": 9.566462508202403e-06, "epoch": 0.664, "percentage": 22.13, "elapsed_time": "1:15:06", "remaining_time": "4:24:13"}
|
| 84 |
+
{"current_steps": 84, "total_steps": 375, "loss": 0.9034, "lr": 9.547279433091446e-06, "epoch": 0.672, "percentage": 22.4, "elapsed_time": "1:16:08", "remaining_time": "4:23:46"}
|
| 85 |
+
{"current_steps": 85, "total_steps": 375, "loss": 0.9433, "lr": 9.527701183909336e-06, "epoch": 0.68, "percentage": 22.67, "elapsed_time": "1:17:17", "remaining_time": "4:23:40"}
|
| 86 |
+
{"current_steps": 86, "total_steps": 375, "loss": 0.9063, "lr": 9.507729462072615e-06, "epoch": 0.688, "percentage": 22.93, "elapsed_time": "1:18:13", "remaining_time": "4:22:51"}
|
| 87 |
+
{"current_steps": 87, "total_steps": 375, "loss": 0.8911, "lr": 9.48736600319193e-06, "epoch": 0.696, "percentage": 23.2, "elapsed_time": "1:19:07", "remaining_time": "4:21:57"}
|
| 88 |
+
{"current_steps": 88, "total_steps": 375, "loss": 0.8723, "lr": 9.466612576921223e-06, "epoch": 0.704, "percentage": 23.47, "elapsed_time": "1:20:03", "remaining_time": "4:21:07"}
|
| 89 |
+
{"current_steps": 89, "total_steps": 375, "loss": 0.9128, "lr": 9.445470986803922e-06, "epoch": 0.712, "percentage": 23.73, "elapsed_time": "1:20:59", "remaining_time": "4:20:14"}
|
| 90 |
+
{"current_steps": 90, "total_steps": 375, "loss": 0.8976, "lr": 9.423943070116219e-06, "epoch": 0.72, "percentage": 24.0, "elapsed_time": "1:21:53", "remaining_time": "4:19:20"}
|
| 91 |
+
{"current_steps": 91, "total_steps": 375, "loss": 0.9017, "lr": 9.402030697707398e-06, "epoch": 0.728, "percentage": 24.27, "elapsed_time": "1:22:54", "remaining_time": "4:18:45"}
|
| 92 |
+
{"current_steps": 92, "total_steps": 375, "loss": 0.9186, "lr": 9.37973577383726e-06, "epoch": 0.736, "percentage": 24.53, "elapsed_time": "1:23:57", "remaining_time": "4:18:14"}
|
| 93 |
+
{"current_steps": 93, "total_steps": 375, "loss": 0.9105, "lr": 9.357060236010626e-06, "epoch": 0.744, "percentage": 24.8, "elapsed_time": "1:24:52", "remaining_time": "4:17:22"}
|
| 94 |
+
{"current_steps": 94, "total_steps": 375, "loss": 0.9206, "lr": 9.334006054808966e-06, "epoch": 0.752, "percentage": 25.07, "elapsed_time": "1:25:51", "remaining_time": "4:16:38"}
|
| 95 |
+
{"current_steps": 95, "total_steps": 375, "loss": 0.9198, "lr": 9.310575233719155e-06, "epoch": 0.76, "percentage": 25.33, "elapsed_time": "1:26:49", "remaining_time": "4:15:53"}
|
| 96 |
+
{"current_steps": 96, "total_steps": 375, "loss": 0.8981, "lr": 9.28676980895935e-06, "epoch": 0.768, "percentage": 25.6, "elapsed_time": "1:27:49", "remaining_time": "4:15:15"}
|
| 97 |
+
{"current_steps": 97, "total_steps": 375, "loss": 0.8867, "lr": 9.262591849302049e-06, "epoch": 0.776, "percentage": 25.87, "elapsed_time": "1:28:42", "remaining_time": "4:14:14"}
|
| 98 |
+
{"current_steps": 98, "total_steps": 375, "loss": 0.9053, "lr": 9.238043455894294e-06, "epoch": 0.784, "percentage": 26.13, "elapsed_time": "1:29:29", "remaining_time": "4:12:57"}
|
| 99 |
+
{"current_steps": 99, "total_steps": 375, "loss": 0.8963, "lr": 9.213126762075088e-06, "epoch": 0.792, "percentage": 26.4, "elapsed_time": "1:30:36", "remaining_time": "4:12:35"}
|
| 100 |
+
{"current_steps": 100, "total_steps": 375, "loss": 0.8234, "lr": 9.187843933189994e-06, "epoch": 0.8, "percentage": 26.67, "elapsed_time": "1:31:27", "remaining_time": "4:11:31"}
|
| 101 |
+
{"current_steps": 101, "total_steps": 375, "loss": 0.9065, "lr": 9.162197166402957e-06, "epoch": 0.808, "percentage": 26.93, "elapsed_time": "1:32:33", "remaining_time": "4:11:05"}
|
| 102 |
+
{"current_steps": 102, "total_steps": 375, "loss": 0.9195, "lr": 9.136188690505363e-06, "epoch": 0.816, "percentage": 27.2, "elapsed_time": "1:33:30", "remaining_time": "4:10:16"}
|
| 103 |
+
{"current_steps": 103, "total_steps": 375, "loss": 0.8867, "lr": 9.109820765722357e-06, "epoch": 0.824, "percentage": 27.47, "elapsed_time": "1:34:21", "remaining_time": "4:09:09"}
|
| 104 |
+
{"current_steps": 104, "total_steps": 375, "loss": 0.9039, "lr": 9.083095683516414e-06, "epoch": 0.832, "percentage": 27.73, "elapsed_time": "1:35:20", "remaining_time": "4:08:25"}
|
| 105 |
+
{"current_steps": 105, "total_steps": 375, "loss": 0.9004, "lr": 9.056015766388205e-06, "epoch": 0.84, "percentage": 28.0, "elapsed_time": "1:36:14", "remaining_time": "4:07:28"}
|
| 106 |
+
{"current_steps": 106, "total_steps": 375, "loss": 0.8644, "lr": 9.028583367674767e-06, "epoch": 0.848, "percentage": 28.27, "elapsed_time": "1:37:15", "remaining_time": "4:06:49"}
|
| 107 |
+
{"current_steps": 107, "total_steps": 375, "loss": 0.9033, "lr": 9.00080087134498e-06, "epoch": 0.856, "percentage": 28.53, "elapsed_time": "1:38:11", "remaining_time": "4:05:56"}
|
| 108 |
+
{"current_steps": 108, "total_steps": 375, "loss": 0.846, "lr": 8.972670691792409e-06, "epoch": 0.864, "percentage": 28.8, "elapsed_time": "1:38:54", "remaining_time": "4:04:31"}
|
| 109 |
+
{"current_steps": 109, "total_steps": 375, "loss": 0.9196, "lr": 8.944195273625472e-06, "epoch": 0.872, "percentage": 29.07, "elapsed_time": "1:39:51", "remaining_time": "4:03:41"}
|
| 110 |
+
{"current_steps": 110, "total_steps": 375, "loss": 0.8949, "lr": 8.915377091454992e-06, "epoch": 0.88, "percentage": 29.33, "elapsed_time": "1:40:51", "remaining_time": "4:02:57"}
|
| 111 |
+
{"current_steps": 111, "total_steps": 375, "loss": 0.8848, "lr": 8.886218649679162e-06, "epoch": 0.888, "percentage": 29.6, "elapsed_time": "1:41:49", "remaining_time": "4:02:11"}
|
| 112 |
+
{"current_steps": 112, "total_steps": 375, "loss": 0.8571, "lr": 8.856722482265886e-06, "epoch": 0.896, "percentage": 29.87, "elapsed_time": "1:42:51", "remaining_time": "4:01:32"}
|
| 113 |
+
{"current_steps": 113, "total_steps": 375, "loss": 0.874, "lr": 8.826891152532579e-06, "epoch": 0.904, "percentage": 30.13, "elapsed_time": "1:43:54", "remaining_time": "4:00:54"}
|
| 114 |
+
{"current_steps": 114, "total_steps": 375, "loss": 0.8715, "lr": 8.796727252923403e-06, "epoch": 0.912, "percentage": 30.4, "elapsed_time": "1:44:43", "remaining_time": "3:59:46"}
|
| 115 |
+
{"current_steps": 115, "total_steps": 375, "loss": 0.8836, "lr": 8.766233404783975e-06, "epoch": 0.92, "percentage": 30.67, "elapsed_time": "1:45:36", "remaining_time": "3:58:45"}
|
| 116 |
+
{"current_steps": 116, "total_steps": 375, "loss": 0.8686, "lr": 8.735412258133562e-06, "epoch": 0.928, "percentage": 30.93, "elapsed_time": "1:46:31", "remaining_time": "3:57:49"}
|
| 117 |
+
{"current_steps": 117, "total_steps": 375, "loss": 0.8661, "lr": 8.704266491434787e-06, "epoch": 0.936, "percentage": 31.2, "elapsed_time": "1:47:29", "remaining_time": "3:57:02"}
|
| 118 |
+
{"current_steps": 118, "total_steps": 375, "loss": 0.8357, "lr": 8.672798811360863e-06, "epoch": 0.944, "percentage": 31.47, "elapsed_time": "1:48:17", "remaining_time": "3:55:51"}
|
| 119 |
+
{"current_steps": 119, "total_steps": 375, "loss": 0.8542, "lr": 8.641011952560372e-06, "epoch": 0.952, "percentage": 31.73, "elapsed_time": "1:49:11", "remaining_time": "3:54:54"}
|
| 120 |
+
{"current_steps": 120, "total_steps": 375, "loss": 0.898, "lr": 8.608908677419606e-06, "epoch": 0.96, "percentage": 32.0, "elapsed_time": "1:50:01", "remaining_time": "3:53:47"}
|
| 121 |
+
{"current_steps": 121, "total_steps": 375, "loss": 0.8579, "lr": 8.576491775822527e-06, "epoch": 0.968, "percentage": 32.27, "elapsed_time": "1:50:53", "remaining_time": "3:52:47"}
|
| 122 |
+
{"current_steps": 122, "total_steps": 375, "loss": 0.8628, "lr": 8.543764064908295e-06, "epoch": 0.976, "percentage": 32.53, "elapsed_time": "1:51:47", "remaining_time": "3:51:50"}
|
| 123 |
+
{"current_steps": 123, "total_steps": 375, "loss": 0.8672, "lr": 8.510728388826464e-06, "epoch": 0.984, "percentage": 32.8, "elapsed_time": "1:52:37", "remaining_time": "3:50:44"}
|
| 124 |
+
{"current_steps": 124, "total_steps": 375, "loss": 0.849, "lr": 8.477387618489808e-06, "epoch": 0.992, "percentage": 33.07, "elapsed_time": "1:53:35", "remaining_time": "3:49:55"}
|
| 125 |
+
{"current_steps": 125, "total_steps": 375, "loss": 0.8512, "lr": 8.443744651324828e-06, "epoch": 1.0, "percentage": 33.33, "elapsed_time": "1:54:26", "remaining_time": "3:48:52"}
|
| 126 |
+
{"current_steps": 126, "total_steps": 375, "loss": 0.8784, "lr": 8.409802411019962e-06, "epoch": 1.008, "percentage": 33.6, "elapsed_time": "1:56:22", "remaining_time": "3:49:58"}
|
| 127 |
+
{"current_steps": 127, "total_steps": 375, "loss": 0.8431, "lr": 8.375563847271506e-06, "epoch": 1.016, "percentage": 33.87, "elapsed_time": "1:57:17", "remaining_time": "3:49:03"}
|
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:28de54c028b05553bd07665131995b83d3f324a0548272765df62aef2ac36729
+size 7416
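(Aside, not part of the commit: training_args.bin above is stored as a git-lfs pointer; the actual file is the pickled TrainingArguments object that the transformers Trainer writes next to its outputs. A minimal sketch for inspecting it after pulling the real file, assuming torch and transformers are installed locally:)

import torch  # transformers must also be importable so the pickle can resolve TrainingArguments

# weights_only=False because the file is a pickled Python object, not a plain tensor checkpoint
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.lr_scheduler_type, args.num_train_epochs)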
vocab.json
ADDED
The diff for this file is too large to render.
See raw diff
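(Aside, not part of the commit: the trainer_log.jsonl entries added above are plain JSON-lines records, one per logged optimizer step. A minimal sketch for summarizing them locally, assuming only the Python standard library and the file pulled into the working directory:)

import json

steps, losses, lrs = [], [], []
with open("trainer_log.jsonl") as f:
    for line in f:
        line = line.strip()
        if not line:
            continue
        record = json.loads(line)
        if "loss" not in record:  # skip any summary records that lack a per-step loss
            continue
        steps.append(record["current_steps"])  # field names exactly as in the log above
        losses.append(record["loss"])
        lrs.append(record["lr"])

print(f"steps {steps[0]}..{steps[-1]}: last loss {losses[-1]:.4f}, min loss {min(losses):.4f}")
print(f"peak lr {max(lrs):.2e}")

(In the span shown in this diff, the loss drifts from roughly 0.97 at step 26 down to about 0.84 by step 127, while the learning rate warms up to 1e-05 at step 38 and then decays.)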