Training in progress, epoch 1
- .gitattributes +1 -0
- added_tokens.json +24 -0
- config.json +29 -0
- merges.txt +0 -0
- model-00001-of-00004.safetensors +3 -0
- model-00002-of-00004.safetensors +3 -0
- model-00003-of-00004.safetensors +3 -0
- model-00004-of-00004.safetensors +3 -0
- model.safetensors.index.json +346 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +208 -0
- trainer_log.jsonl +159 -0
- training_args.bin +3 -0
- vocab.json +0 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,24 @@
{
  "</tool_call>": 151658,
  "<tool_call>": 151657,
  "<|box_end|>": 151649,
  "<|box_start|>": 151648,
  "<|endoftext|>": 151643,
  "<|file_sep|>": 151664,
  "<|fim_middle|>": 151660,
  "<|fim_pad|>": 151662,
  "<|fim_prefix|>": 151659,
  "<|fim_suffix|>": 151661,
  "<|im_end|>": 151645,
  "<|im_start|>": 151644,
  "<|image_pad|>": 151655,
  "<|object_ref_end|>": 151647,
  "<|object_ref_start|>": 151646,
  "<|quad_end|>": 151651,
  "<|quad_start|>": 151650,
  "<|repo_name|>": 151663,
  "<|video_pad|>": 151656,
  "<|vision_end|>": 151653,
  "<|vision_pad|>": 151654,
  "<|vision_start|>": 151652
}
config.json ADDED
@@ -0,0 +1,29 @@
{
  "_name_or_path": "Qwen/Qwen2.5-7B-Instruct",
  "architectures": [
    "Qwen2ForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 151643,
  "eos_token_id": 151645,
  "hidden_act": "silu",
  "hidden_size": 3584,
  "initializer_range": 0.02,
  "intermediate_size": 18944,
  "max_position_embeddings": 32768,
  "max_window_layers": 28,
  "model_type": "qwen2",
  "num_attention_heads": 28,
  "num_hidden_layers": 28,
  "num_key_value_heads": 4,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 1000000.0,
  "sliding_window": null,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.46.1",
  "use_cache": false,
  "use_sliding_window": false,
  "vocab_size": 152064
}
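For context, a minimal sketch (not part of the commit) of how a checkpoint with this config.json and the sharded safetensors files below can be loaded with the transformers library; the local directory path is a placeholder, not something defined in this repo:

# Minimal sketch, assuming `transformers` and `torch` are installed and the
# files from this commit have been downloaded to a local directory.
import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

checkpoint_dir = "./checkpoint"  # placeholder path to the files in this commit

config = AutoConfig.from_pretrained(checkpoint_dir)
print(config.model_type, config.hidden_size, config.num_hidden_layers)  # qwen2 3584 28

# from_pretrained reads model.safetensors.index.json and loads all four shards.
model = AutoModelForCausalLM.from_pretrained(checkpoint_dir, torch_dtype=torch.bfloat16)
tokenizer = AutoTokenizer.from_pretrained(checkpoint_dir)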
merges.txt ADDED
The diff for this file is too large to render. See raw diff.
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:59aa2b24523ad9381abc86991665b7b5ccac3e2a940c7558d3feefd8d02fd58a
size 4877660776
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9118e099c17134ba25c6716c9b26186230618e8864e7e96bd1ef249605206251
size 4932751008
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f74606d646708acb357b85b27de84695d5c67923fe7ef27021f5fea25532e094
size 4330865200
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fc94b6cbcbebdaf226ec494820092bf5b47f0785fb462d50b871e576a6feb3e4
size 1089994880
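Each *.safetensors entry above is a Git LFS pointer (spec version, sha256 oid, byte size) rather than the weights themselves. A small sketch, assuming a shard has been downloaded locally under the same name, of checking the file against the oid and size recorded in its pointer; the expected values are taken from the first shard above:

# Sketch: verify a downloaded shard against its Git LFS pointer (oid + size).
import hashlib
import os

path = "model-00001-of-00004.safetensors"  # assumes the shard sits next to this script
expected_oid = "59aa2b24523ad9381abc86991665b7b5ccac3e2a940c7558d3feefd8d02fd58a"
expected_size = 4877660776

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        sha256.update(chunk)

assert os.path.getsize(path) == expected_size
assert sha256.hexdigest() == expected_oid
print("shard matches its LFS pointer")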
model.safetensors.index.json ADDED
@@ -0,0 +1,346 @@
{
  "metadata": {
    "total_size": 15231233024
  },
  "weight_map": {
    "lm_head.weight": "model-00004-of-00004.safetensors",
    "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.norm.weight": "model-00003-of-00004.safetensors"
  }
}
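The index above maps each parameter name to the shard that stores it ("total_size" is the combined byte size of all shards). A short sketch, assuming the index and shards sit in the current directory and the safetensors package is installed, of finding which shard holds a given tensor and loading only that tensor:

# Sketch: resolve a tensor to its shard via model.safetensors.index.json and
# load just that tensor with safetensors.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.18.mlp.gate_proj.weight"
shard = index["weight_map"][name]  # -> "model-00002-of-00004.safetensors"
print(name, "lives in", shard)

with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(tensor.shape, tensor.dtype)  # expected (18944, 3584), bfloat16, per config.json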
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
{
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
size 11421896
tokenizer_config.json ADDED
@@ -0,0 +1,208 @@
{
  "add_bos_token": false,
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151645": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151646": {
      "content": "<|object_ref_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151647": {
      "content": "<|object_ref_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151648": {
      "content": "<|box_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151649": {
      "content": "<|box_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151650": {
      "content": "<|quad_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151651": {
      "content": "<|quad_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151652": {
      "content": "<|vision_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151653": {
      "content": "<|vision_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151654": {
      "content": "<|vision_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151655": {
      "content": "<|image_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151656": {
      "content": "<|video_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151657": {
      "content": "<tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151658": {
      "content": "</tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151659": {
      "content": "<|fim_prefix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151660": {
      "content": "<|fim_middle|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151661": {
      "content": "<|fim_suffix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151662": {
      "content": "<|fim_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151663": {
      "content": "<|repo_name|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151664": {
      "content": "<|file_sep|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    }
  },
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "bos_token": null,
"chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|endoftext|>",
  "errors": "replace",
  "model_max_length": 131072,
  "pad_token": "<|endoftext|>",
  "padding_side": "right",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null
}
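tokenizer_config.json carries the Qwen2 ChatML-style template in its long "chat_template" Jinja string. A minimal sketch, assuming transformers is installed and the tokenizer files from this commit are available under a placeholder local path, of rendering a conversation with that template via apply_chat_template:

# Sketch: render a prompt with the chat_template defined in tokenizer_config.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./checkpoint")  # placeholder path

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
prompt = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,  # appends the "<|im_start|>assistant\n" turn opener
)
print(prompt)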
trainer_log.jsonl ADDED
@@ -0,0 +1,159 @@
{"current_steps": 1, "total_steps": 234, "loss": 1.0868, "lr": 4.1666666666666667e-07, "epoch": 0.01279317697228145, "percentage": 0.43, "elapsed_time": "0:00:50", "remaining_time": "3:14:28"}
{"current_steps": 2, "total_steps": 234, "loss": 1.1044, "lr": 8.333333333333333e-07, "epoch": 0.0255863539445629, "percentage": 0.85, "elapsed_time": "0:01:34", "remaining_time": "3:02:12"}
{"current_steps": 3, "total_steps": 234, "loss": 1.1401, "lr": 1.25e-06, "epoch": 0.03837953091684435, "percentage": 1.28, "elapsed_time": "0:02:15", "remaining_time": "2:53:20"}
{"current_steps": 4, "total_steps": 234, "loss": 1.1168, "lr": 1.6666666666666667e-06, "epoch": 0.0511727078891258, "percentage": 1.71, "elapsed_time": "0:02:56", "remaining_time": "2:49:32"}
{"current_steps": 5, "total_steps": 234, "loss": 1.0934, "lr": 2.0833333333333334e-06, "epoch": 0.06396588486140725, "percentage": 2.14, "elapsed_time": "0:03:39", "remaining_time": "2:47:43"}
{"current_steps": 6, "total_steps": 234, "loss": 1.081, "lr": 2.5e-06, "epoch": 0.0767590618336887, "percentage": 2.56, "elapsed_time": "0:04:18", "remaining_time": "2:43:24"}
{"current_steps": 7, "total_steps": 234, "loss": 1.0356, "lr": 2.916666666666667e-06, "epoch": 0.08955223880597014, "percentage": 2.99, "elapsed_time": "0:04:58", "remaining_time": "2:41:14"}
{"current_steps": 8, "total_steps": 234, "loss": 0.9912, "lr": 3.3333333333333333e-06, "epoch": 0.1023454157782516, "percentage": 3.42, "elapsed_time": "0:05:36", "remaining_time": "2:38:35"}
{"current_steps": 9, "total_steps": 234, "loss": 0.947, "lr": 3.7500000000000005e-06, "epoch": 0.11513859275053305, "percentage": 3.85, "elapsed_time": "0:06:16", "remaining_time": "2:36:45"}
{"current_steps": 10, "total_steps": 234, "loss": 0.9328, "lr": 4.166666666666667e-06, "epoch": 0.1279317697228145, "percentage": 4.27, "elapsed_time": "0:06:54", "remaining_time": "2:34:39"}
{"current_steps": 11, "total_steps": 234, "loss": 0.9871, "lr": 4.583333333333333e-06, "epoch": 0.14072494669509594, "percentage": 4.7, "elapsed_time": "0:07:33", "remaining_time": "2:33:04"}
{"current_steps": 12, "total_steps": 234, "loss": 0.9252, "lr": 5e-06, "epoch": 0.1535181236673774, "percentage": 5.13, "elapsed_time": "0:08:11", "remaining_time": "2:31:31"}
{"current_steps": 13, "total_steps": 234, "loss": 0.9241, "lr": 5.416666666666667e-06, "epoch": 0.16631130063965885, "percentage": 5.56, "elapsed_time": "0:08:50", "remaining_time": "2:30:16"}
{"current_steps": 14, "total_steps": 234, "loss": 0.9443, "lr": 5.833333333333334e-06, "epoch": 0.1791044776119403, "percentage": 5.98, "elapsed_time": "0:09:32", "remaining_time": "2:30:00"}
{"current_steps": 15, "total_steps": 234, "loss": 0.946, "lr": 6.25e-06, "epoch": 0.19189765458422176, "percentage": 6.41, "elapsed_time": "0:10:14", "remaining_time": "2:29:25"}
{"current_steps": 16, "total_steps": 234, "loss": 0.8909, "lr": 6.666666666666667e-06, "epoch": 0.2046908315565032, "percentage": 6.84, "elapsed_time": "0:10:50", "remaining_time": "2:27:43"}
{"current_steps": 17, "total_steps": 234, "loss": 0.8488, "lr": 7.083333333333335e-06, "epoch": 0.21748400852878466, "percentage": 7.26, "elapsed_time": "0:11:25", "remaining_time": "2:25:52"}
{"current_steps": 18, "total_steps": 234, "loss": 0.9146, "lr": 7.500000000000001e-06, "epoch": 0.2302771855010661, "percentage": 7.69, "elapsed_time": "0:12:02", "remaining_time": "2:24:26"}
{"current_steps": 19, "total_steps": 234, "loss": 0.8769, "lr": 7.916666666666667e-06, "epoch": 0.24307036247334754, "percentage": 8.12, "elapsed_time": "0:12:41", "remaining_time": "2:23:31"}
{"current_steps": 20, "total_steps": 234, "loss": 0.8631, "lr": 8.333333333333334e-06, "epoch": 0.255863539445629, "percentage": 8.55, "elapsed_time": "0:13:15", "remaining_time": "2:21:49"}
{"current_steps": 21, "total_steps": 234, "loss": 0.8269, "lr": 8.750000000000001e-06, "epoch": 0.26865671641791045, "percentage": 8.97, "elapsed_time": "0:13:49", "remaining_time": "2:20:18"}
{"current_steps": 22, "total_steps": 234, "loss": 0.8353, "lr": 9.166666666666666e-06, "epoch": 0.2814498933901919, "percentage": 9.4, "elapsed_time": "0:14:25", "remaining_time": "2:18:55"}
{"current_steps": 23, "total_steps": 234, "loss": 0.8076, "lr": 9.583333333333335e-06, "epoch": 0.2942430703624733, "percentage": 9.83, "elapsed_time": "0:15:06", "remaining_time": "2:18:33"}
{"current_steps": 24, "total_steps": 234, "loss": 0.9005, "lr": 1e-05, "epoch": 0.3070362473347548, "percentage": 10.26, "elapsed_time": "0:15:46", "remaining_time": "2:17:59"}
{"current_steps": 25, "total_steps": 234, "loss": 0.8255, "lr": 9.999440509051367e-06, "epoch": 0.31982942430703626, "percentage": 10.68, "elapsed_time": "0:16:25", "remaining_time": "2:17:16"}
{"current_steps": 26, "total_steps": 234, "loss": 0.8363, "lr": 9.997762161417517e-06, "epoch": 0.3326226012793177, "percentage": 11.11, "elapsed_time": "0:17:07", "remaining_time": "2:16:58"}
{"current_steps": 27, "total_steps": 234, "loss": 0.7949, "lr": 9.994965332706574e-06, "epoch": 0.34541577825159914, "percentage": 11.54, "elapsed_time": "0:17:45", "remaining_time": "2:16:09"}
{"current_steps": 28, "total_steps": 234, "loss": 0.7652, "lr": 9.991050648838676e-06, "epoch": 0.3582089552238806, "percentage": 11.97, "elapsed_time": "0:18:21", "remaining_time": "2:15:05"}
{"current_steps": 29, "total_steps": 234, "loss": 0.8445, "lr": 9.986018985905901e-06, "epoch": 0.37100213219616207, "percentage": 12.39, "elapsed_time": "0:19:01", "remaining_time": "2:14:31"}
{"current_steps": 30, "total_steps": 234, "loss": 0.7808, "lr": 9.979871469976197e-06, "epoch": 0.3837953091684435, "percentage": 12.82, "elapsed_time": "0:19:39", "remaining_time": "2:13:41"}
{"current_steps": 31, "total_steps": 234, "loss": 0.8133, "lr": 9.972609476841368e-06, "epoch": 0.39658848614072495, "percentage": 13.25, "elapsed_time": "0:20:16", "remaining_time": "2:12:47"}
{"current_steps": 32, "total_steps": 234, "loss": 0.7957, "lr": 9.964234631709188e-06, "epoch": 0.4093816631130064, "percentage": 13.68, "elapsed_time": "0:20:59", "remaining_time": "2:12:30"}
{"current_steps": 33, "total_steps": 234, "loss": 0.8108, "lr": 9.954748808839675e-06, "epoch": 0.42217484008528783, "percentage": 14.1, "elapsed_time": "0:21:37", "remaining_time": "2:11:40"}
{"current_steps": 34, "total_steps": 234, "loss": 0.7906, "lr": 9.944154131125643e-06, "epoch": 0.4349680170575693, "percentage": 14.53, "elapsed_time": "0:22:12", "remaining_time": "2:10:39"}
{"current_steps": 35, "total_steps": 234, "loss": 0.8191, "lr": 9.932452969617607e-06, "epoch": 0.44776119402985076, "percentage": 14.96, "elapsed_time": "0:22:44", "remaining_time": "2:09:16"}
{"current_steps": 36, "total_steps": 234, "loss": 0.7929, "lr": 9.91964794299315e-06, "epoch": 0.4605543710021322, "percentage": 15.38, "elapsed_time": "0:23:24", "remaining_time": "2:08:45"}
{"current_steps": 37, "total_steps": 234, "loss": 0.853, "lr": 9.905741916970863e-06, "epoch": 0.47334754797441364, "percentage": 15.81, "elapsed_time": "0:24:04", "remaining_time": "2:08:12"}
{"current_steps": 38, "total_steps": 234, "loss": 0.7733, "lr": 9.890738003669029e-06, "epoch": 0.4861407249466951, "percentage": 16.24, "elapsed_time": "0:24:40", "remaining_time": "2:07:14"}
{"current_steps": 39, "total_steps": 234, "loss": 0.8247, "lr": 9.874639560909118e-06, "epoch": 0.4989339019189765, "percentage": 16.67, "elapsed_time": "0:25:15", "remaining_time": "2:06:15"}
{"current_steps": 40, "total_steps": 234, "loss": 0.8102, "lr": 9.857450191464337e-06, "epoch": 0.511727078891258, "percentage": 17.09, "elapsed_time": "0:25:53", "remaining_time": "2:05:33"}
{"current_steps": 41, "total_steps": 234, "loss": 0.7774, "lr": 9.839173742253334e-06, "epoch": 0.5245202558635395, "percentage": 17.52, "elapsed_time": "0:26:29", "remaining_time": "2:04:42"}
|
| 42 |
+
{"current_steps": 42, "total_steps": 234, "loss": 0.766, "lr": 9.819814303479268e-06, "epoch": 0.5373134328358209, "percentage": 17.95, "elapsed_time": "0:27:01", "remaining_time": "2:03:32"}
|
| 43 |
+
{"current_steps": 43, "total_steps": 234, "loss": 0.7937, "lr": 9.799376207714446e-06, "epoch": 0.5501066098081023, "percentage": 18.38, "elapsed_time": "0:27:40", "remaining_time": "2:02:57"}
|
| 44 |
+
{"current_steps": 44, "total_steps": 234, "loss": 0.7861, "lr": 9.777864028930705e-06, "epoch": 0.5628997867803838, "percentage": 18.8, "elapsed_time": "0:28:19", "remaining_time": "2:02:18"}
|
| 45 |
+
{"current_steps": 45, "total_steps": 234, "loss": 0.7356, "lr": 9.755282581475769e-06, "epoch": 0.5756929637526652, "percentage": 19.23, "elapsed_time": "0:29:01", "remaining_time": "2:01:54"}
|
| 46 |
+
{"current_steps": 46, "total_steps": 234, "loss": 0.7157, "lr": 9.731636918995821e-06, "epoch": 0.5884861407249466, "percentage": 19.66, "elapsed_time": "0:29:36", "remaining_time": "2:01:00"}
|
| 47 |
+
{"current_steps": 47, "total_steps": 234, "loss": 0.7599, "lr": 9.706932333304518e-06, "epoch": 0.6012793176972282, "percentage": 20.09, "elapsed_time": "0:30:13", "remaining_time": "2:00:15"}
|
| 48 |
+
{"current_steps": 48, "total_steps": 234, "loss": 0.7896, "lr": 9.681174353198687e-06, "epoch": 0.6140724946695096, "percentage": 20.51, "elapsed_time": "0:30:54", "remaining_time": "1:59:44"}
|
| 49 |
+
{"current_steps": 49, "total_steps": 234, "loss": 0.7775, "lr": 9.654368743221022e-06, "epoch": 0.6268656716417911, "percentage": 20.94, "elapsed_time": "0:31:26", "remaining_time": "1:58:40"}
|
| 50 |
+
{"current_steps": 50, "total_steps": 234, "loss": 0.7615, "lr": 9.626521502369984e-06, "epoch": 0.6396588486140725, "percentage": 21.37, "elapsed_time": "0:32:05", "remaining_time": "1:58:06"}
|
| 51 |
+
{"current_steps": 51, "total_steps": 234, "loss": 0.8036, "lr": 9.597638862757255e-06, "epoch": 0.652452025586354, "percentage": 21.79, "elapsed_time": "0:32:40", "remaining_time": "1:57:16"}
|
| 52 |
+
{"current_steps": 52, "total_steps": 234, "loss": 0.8269, "lr": 9.567727288213005e-06, "epoch": 0.6652452025586354, "percentage": 22.22, "elapsed_time": "0:33:15", "remaining_time": "1:56:22"}
|
| 53 |
+
{"current_steps": 53, "total_steps": 234, "loss": 0.7883, "lr": 9.536793472839325e-06, "epoch": 0.6780383795309168, "percentage": 22.65, "elapsed_time": "0:33:55", "remaining_time": "1:55:50"}
|
| 54 |
+
{"current_steps": 54, "total_steps": 234, "loss": 0.7726, "lr": 9.504844339512096e-06, "epoch": 0.6908315565031983, "percentage": 23.08, "elapsed_time": "0:34:29", "remaining_time": "1:54:58"}
|
| 55 |
+
{"current_steps": 55, "total_steps": 234, "loss": 0.7631, "lr": 9.471887038331686e-06, "epoch": 0.7036247334754797, "percentage": 23.5, "elapsed_time": "0:35:11", "remaining_time": "1:54:33"}
|
| 56 |
+
{"current_steps": 56, "total_steps": 234, "loss": 0.7916, "lr": 9.437928945022772e-06, "epoch": 0.7164179104477612, "percentage": 23.93, "elapsed_time": "0:35:52", "remaining_time": "1:54:01"}
|
| 57 |
+
{"current_steps": 57, "total_steps": 234, "loss": 0.7652, "lr": 9.40297765928369e-06, "epoch": 0.7292110874200426, "percentage": 24.36, "elapsed_time": "0:36:34", "remaining_time": "1:53:35"}
|
| 58 |
+
{"current_steps": 58, "total_steps": 234, "loss": 0.7946, "lr": 9.36704100308565e-06, "epoch": 0.7420042643923241, "percentage": 24.79, "elapsed_time": "0:37:12", "remaining_time": "1:52:54"}
|
| 59 |
+
{"current_steps": 59, "total_steps": 234, "loss": 0.7118, "lr": 9.330127018922195e-06, "epoch": 0.7547974413646056, "percentage": 25.21, "elapsed_time": "0:37:52", "remaining_time": "1:52:19"}
|
| 60 |
+
{"current_steps": 60, "total_steps": 234, "loss": 0.7412, "lr": 9.292243968009332e-06, "epoch": 0.767590618336887, "percentage": 25.64, "elapsed_time": "0:38:31", "remaining_time": "1:51:42"}
|
| 61 |
+
{"current_steps": 61, "total_steps": 234, "loss": 0.7634, "lr": 9.253400328436699e-06, "epoch": 0.7803837953091685, "percentage": 26.07, "elapsed_time": "0:39:11", "remaining_time": "1:51:10"}
|
| 62 |
+
{"current_steps": 62, "total_steps": 234, "loss": 0.7353, "lr": 9.213604793270196e-06, "epoch": 0.7931769722814499, "percentage": 26.5, "elapsed_time": "0:39:48", "remaining_time": "1:50:26"}
|
| 63 |
+
{"current_steps": 63, "total_steps": 234, "loss": 0.7697, "lr": 9.172866268606514e-06, "epoch": 0.8059701492537313, "percentage": 26.92, "elapsed_time": "0:40:22", "remaining_time": "1:49:36"}
|
| 64 |
+
{"current_steps": 64, "total_steps": 234, "loss": 0.8112, "lr": 9.131193871579975e-06, "epoch": 0.8187633262260128, "percentage": 27.35, "elapsed_time": "0:41:02", "remaining_time": "1:49:02"}
|
| 65 |
+
{"current_steps": 65, "total_steps": 234, "loss": 0.731, "lr": 9.088596928322158e-06, "epoch": 0.8315565031982942, "percentage": 27.78, "elapsed_time": "0:41:41", "remaining_time": "1:48:24"}
|
| 66 |
+
{"current_steps": 66, "total_steps": 234, "loss": 0.7206, "lr": 9.045084971874738e-06, "epoch": 0.8443496801705757, "percentage": 28.21, "elapsed_time": "0:42:23", "remaining_time": "1:47:53"}
|
| 67 |
+
{"current_steps": 67, "total_steps": 234, "loss": 0.7843, "lr": 9.000667740056033e-06, "epoch": 0.8571428571428571, "percentage": 28.63, "elapsed_time": "0:42:59", "remaining_time": "1:47:10"}
|
| 68 |
+
{"current_steps": 68, "total_steps": 234, "loss": 0.76, "lr": 8.955355173281709e-06, "epoch": 0.8699360341151386, "percentage": 29.06, "elapsed_time": "0:43:37", "remaining_time": "1:46:28"}
|
| 69 |
+
{"current_steps": 69, "total_steps": 234, "loss": 0.7674, "lr": 8.90915741234015e-06, "epoch": 0.8827292110874201, "percentage": 29.49, "elapsed_time": "0:44:09", "remaining_time": "1:45:35"}
|
| 70 |
+
{"current_steps": 70, "total_steps": 234, "loss": 0.7656, "lr": 8.862084796122998e-06, "epoch": 0.8955223880597015, "percentage": 29.91, "elapsed_time": "0:44:45", "remaining_time": "1:44:50"}
|
| 71 |
+
{"current_steps": 71, "total_steps": 234, "loss": 0.7303, "lr": 8.814147859311333e-06, "epoch": 0.908315565031983, "percentage": 30.34, "elapsed_time": "0:45:22", "remaining_time": "1:44:09"}
|
| 72 |
+
{"current_steps": 72, "total_steps": 234, "loss": 0.7295, "lr": 8.765357330018056e-06, "epoch": 0.9211087420042644, "percentage": 30.77, "elapsed_time": "0:45:58", "remaining_time": "1:43:26"}
|
| 73 |
+
{"current_steps": 73, "total_steps": 234, "loss": 0.7075, "lr": 8.715724127386971e-06, "epoch": 0.9339019189765458, "percentage": 31.2, "elapsed_time": "0:46:31", "remaining_time": "1:42:35"}
|
| 74 |
+
{"current_steps": 74, "total_steps": 234, "loss": 0.7279, "lr": 8.665259359149132e-06, "epoch": 0.9466950959488273, "percentage": 31.62, "elapsed_time": "0:47:07", "remaining_time": "1:41:54"}
|
| 75 |
+
{"current_steps": 75, "total_steps": 234, "loss": 0.7424, "lr": 8.613974319136959e-06, "epoch": 0.9594882729211087, "percentage": 32.05, "elapsed_time": "0:47:48", "remaining_time": "1:41:21"}
|
| 76 |
+
{"current_steps": 76, "total_steps": 234, "loss": 0.7957, "lr": 8.561880484756726e-06, "epoch": 0.9722814498933902, "percentage": 32.48, "elapsed_time": "0:48:28", "remaining_time": "1:40:45"}
|
| 77 |
+
{"current_steps": 77, "total_steps": 234, "loss": 0.7539, "lr": 8.508989514419959e-06, "epoch": 0.9850746268656716, "percentage": 32.91, "elapsed_time": "0:49:07", "remaining_time": "1:40:08"}
|
| 78 |
+
{"current_steps": 78, "total_steps": 234, "loss": 0.7057, "lr": 8.455313244934324e-06, "epoch": 0.997867803837953, "percentage": 33.33, "elapsed_time": "0:49:44", "remaining_time": "1:39:29"}
|
| 79 |
+
{"current_steps": 79, "total_steps": 234, "loss": 1.2963, "lr": 8.400863688854598e-06, "epoch": 1.0106609808102345, "percentage": 33.76, "elapsed_time": "0:51:12", "remaining_time": "1:40:28"}
|
| 80 |
+
{"current_steps": 80, "total_steps": 234, "loss": 0.7255, "lr": 8.345653031794292e-06, "epoch": 1.023454157782516, "percentage": 34.19, "elapsed_time": "0:51:54", "remaining_time": "1:39:54"}
|
| 81 |
+
{"current_steps": 81, "total_steps": 234, "loss": 0.7831, "lr": 8.289693629698564e-06, "epoch": 1.0362473347547974, "percentage": 34.62, "elapsed_time": "0:52:33", "remaining_time": "1:39:16"}
|
| 82 |
+
{"current_steps": 82, "total_steps": 234, "loss": 0.7265, "lr": 8.232998006078998e-06, "epoch": 1.049040511727079, "percentage": 35.04, "elapsed_time": "0:53:08", "remaining_time": "1:38:30"}
|
| 83 |
+
{"current_steps": 83, "total_steps": 234, "loss": 0.7189, "lr": 8.175578849210894e-06, "epoch": 1.0618336886993602, "percentage": 35.47, "elapsed_time": "0:53:43", "remaining_time": "1:37:45"}
|
| 84 |
+
{"current_steps": 84, "total_steps": 234, "loss": 0.7964, "lr": 8.117449009293668e-06, "epoch": 1.0746268656716418, "percentage": 35.9, "elapsed_time": "0:54:23", "remaining_time": "1:37:07"}
|
| 85 |
+
{"current_steps": 85, "total_steps": 234, "loss": 0.6198, "lr": 8.058621495575032e-06, "epoch": 1.0874200426439233, "percentage": 36.32, "elapsed_time": "0:55:03", "remaining_time": "1:36:30"}
|
| 86 |
+
{"current_steps": 86, "total_steps": 234, "loss": 0.6642, "lr": 7.99910947343957e-06, "epoch": 1.1002132196162047, "percentage": 36.75, "elapsed_time": "0:55:44", "remaining_time": "1:35:55"}
|
| 87 |
+
{"current_steps": 87, "total_steps": 234, "loss": 0.731, "lr": 7.938926261462366e-06, "epoch": 1.1130063965884862, "percentage": 37.18, "elapsed_time": "0:56:24", "remaining_time": "1:35:18"}
|
| 88 |
+
{"current_steps": 88, "total_steps": 234, "loss": 0.794, "lr": 7.87808532842837e-06, "epoch": 1.1257995735607675, "percentage": 37.61, "elapsed_time": "0:57:04", "remaining_time": "1:34:41"}
|
| 89 |
+
{"current_steps": 89, "total_steps": 234, "loss": 0.6953, "lr": 7.81660029031811e-06, "epoch": 1.138592750533049, "percentage": 38.03, "elapsed_time": "0:57:45", "remaining_time": "1:34:05"}
|
| 90 |
+
{"current_steps": 90, "total_steps": 234, "loss": 0.7541, "lr": 7.754484907260513e-06, "epoch": 1.1513859275053304, "percentage": 38.46, "elapsed_time": "0:58:24", "remaining_time": "1:33:26"}
|
| 91 |
+
{"current_steps": 91, "total_steps": 234, "loss": 0.704, "lr": 7.691753080453413e-06, "epoch": 1.164179104477612, "percentage": 38.89, "elapsed_time": "0:59:00", "remaining_time": "1:32:43"}
|
| 92 |
+
{"current_steps": 92, "total_steps": 234, "loss": 0.778, "lr": 7.628418849052523e-06, "epoch": 1.1769722814498933, "percentage": 39.32, "elapsed_time": "0:59:37", "remaining_time": "1:32:01"}
|
| 93 |
+
{"current_steps": 93, "total_steps": 234, "loss": 0.7215, "lr": 7.564496387029532e-06, "epoch": 1.1897654584221748, "percentage": 39.74, "elapsed_time": "1:00:12", "remaining_time": "1:31:17"}
|
| 94 |
+
{"current_steps": 94, "total_steps": 234, "loss": 0.7299, "lr": 7.500000000000001e-06, "epoch": 1.2025586353944564, "percentage": 40.17, "elapsed_time": "1:00:51", "remaining_time": "1:30:39"}
|
| 95 |
+
{"current_steps": 95, "total_steps": 234, "loss": 0.7224, "lr": 7.434944122021837e-06, "epoch": 1.2153518123667377, "percentage": 40.6, "elapsed_time": "1:01:28", "remaining_time": "1:29:56"}
|
| 96 |
+
{"current_steps": 96, "total_steps": 234, "loss": 0.7617, "lr": 7.369343312364994e-06, "epoch": 1.2281449893390193, "percentage": 41.03, "elapsed_time": "1:02:11", "remaining_time": "1:29:24"}
|
| 97 |
+
{"current_steps": 97, "total_steps": 234, "loss": 0.7441, "lr": 7.303212252253163e-06, "epoch": 1.2409381663113006, "percentage": 41.45, "elapsed_time": "1:02:52", "remaining_time": "1:28:48"}
|
| 98 |
+
{"current_steps": 98, "total_steps": 234, "loss": 0.7311, "lr": 7.236565741578163e-06, "epoch": 1.2537313432835822, "percentage": 41.88, "elapsed_time": "1:03:32", "remaining_time": "1:28:10"}
|
| 99 |
+
{"current_steps": 99, "total_steps": 234, "loss": 0.7698, "lr": 7.169418695587791e-06, "epoch": 1.2665245202558635, "percentage": 42.31, "elapsed_time": "1:04:09", "remaining_time": "1:27:29"}
|
| 100 |
+
{"current_steps": 100, "total_steps": 234, "loss": 0.6731, "lr": 7.101786141547829e-06, "epoch": 1.279317697228145, "percentage": 42.74, "elapsed_time": "1:04:52", "remaining_time": "1:26:56"}
|
| 101 |
+
{"current_steps": 101, "total_steps": 234, "loss": 0.7077, "lr": 7.033683215379002e-06, "epoch": 1.2921108742004264, "percentage": 43.16, "elapsed_time": "1:05:27", "remaining_time": "1:26:11"}
|
| 102 |
+
{"current_steps": 102, "total_steps": 234, "loss": 0.7422, "lr": 6.965125158269619e-06, "epoch": 1.304904051172708, "percentage": 43.59, "elapsed_time": "1:06:03", "remaining_time": "1:25:29"}
|
| 103 |
+
{"current_steps": 103, "total_steps": 234, "loss": 0.7129, "lr": 6.896127313264643e-06, "epoch": 1.3176972281449895, "percentage": 44.02, "elapsed_time": "1:06:43", "remaining_time": "1:24:51"}
|
| 104 |
+
{"current_steps": 104, "total_steps": 234, "loss": 0.7248, "lr": 6.8267051218319766e-06, "epoch": 1.3304904051172708, "percentage": 44.44, "elapsed_time": "1:07:17", "remaining_time": "1:24:07"}
|
| 105 |
+
{"current_steps": 105, "total_steps": 234, "loss": 0.6923, "lr": 6.7568741204067145e-06, "epoch": 1.3432835820895521, "percentage": 44.87, "elapsed_time": "1:07:51", "remaining_time": "1:23:22"}
|
| 106 |
+
{"current_steps": 106, "total_steps": 234, "loss": 0.732, "lr": 6.686649936914151e-06, "epoch": 1.3560767590618337, "percentage": 45.3, "elapsed_time": "1:08:28", "remaining_time": "1:22:41"}
|
| 107 |
+
{"current_steps": 107, "total_steps": 234, "loss": 0.7672, "lr": 6.616048287272301e-06, "epoch": 1.3688699360341152, "percentage": 45.73, "elapsed_time": "1:09:06", "remaining_time": "1:22:01"}
|
| 108 |
+
{"current_steps": 108, "total_steps": 234, "loss": 0.7898, "lr": 6.545084971874738e-06, "epoch": 1.3816631130063965, "percentage": 46.15, "elapsed_time": "1:09:45", "remaining_time": "1:21:23"}
|
| 109 |
+
{"current_steps": 109, "total_steps": 234, "loss": 0.7054, "lr": 6.473775872054522e-06, "epoch": 1.394456289978678, "percentage": 46.58, "elapsed_time": "1:10:21", "remaining_time": "1:20:40"}
|
| 110 |
+
{"current_steps": 110, "total_steps": 234, "loss": 0.6099, "lr": 6.402136946530014e-06, "epoch": 1.4072494669509594, "percentage": 47.01, "elapsed_time": "1:10:56", "remaining_time": "1:19:58"}
|
| 111 |
+
{"current_steps": 111, "total_steps": 234, "loss": 0.7295, "lr": 6.330184227833376e-06, "epoch": 1.420042643923241, "percentage": 47.44, "elapsed_time": "1:11:33", "remaining_time": "1:19:17"}
|
| 112 |
+
{"current_steps": 112, "total_steps": 234, "loss": 0.7267, "lr": 6.257933818722544e-06, "epoch": 1.4328358208955223, "percentage": 47.86, "elapsed_time": "1:12:11", "remaining_time": "1:18:38"}
|
| 113 |
+
{"current_steps": 113, "total_steps": 234, "loss": 0.6789, "lr": 6.185401888577488e-06, "epoch": 1.4456289978678039, "percentage": 48.29, "elapsed_time": "1:12:49", "remaining_time": "1:17:58"}
|
| 114 |
+
{"current_steps": 114, "total_steps": 234, "loss": 0.8178, "lr": 6.112604669781572e-06, "epoch": 1.4584221748400852, "percentage": 48.72, "elapsed_time": "1:13:26", "remaining_time": "1:17:18"}
|
| 115 |
+
{"current_steps": 115, "total_steps": 234, "loss": 0.6645, "lr": 6.039558454088796e-06, "epoch": 1.4712153518123667, "percentage": 49.15, "elapsed_time": "1:14:02", "remaining_time": "1:16:36"}
|
| 116 |
+
{"current_steps": 116, "total_steps": 234, "loss": 0.7598, "lr": 5.9662795889777666e-06, "epoch": 1.4840085287846483, "percentage": 49.57, "elapsed_time": "1:14:42", "remaining_time": "1:15:59"}
|
| 117 |
+
{"current_steps": 117, "total_steps": 234, "loss": 0.6873, "lr": 5.892784473993184e-06, "epoch": 1.4968017057569296, "percentage": 50.0, "elapsed_time": "1:15:21", "remaining_time": "1:15:21"}
|
| 118 |
+
{"current_steps": 118, "total_steps": 234, "loss": 0.7689, "lr": 5.819089557075689e-06, "epoch": 1.509594882729211, "percentage": 50.43, "elapsed_time": "1:15:58", "remaining_time": "1:14:41"}
|
| 119 |
+
{"current_steps": 119, "total_steps": 234, "loss": 0.6531, "lr": 5.745211330880872e-06, "epoch": 1.5223880597014925, "percentage": 50.85, "elapsed_time": "1:16:38", "remaining_time": "1:14:03"}
|
| 120 |
+
{"current_steps": 120, "total_steps": 234, "loss": 0.7181, "lr": 5.671166329088278e-06, "epoch": 1.535181236673774, "percentage": 51.28, "elapsed_time": "1:17:15", "remaining_time": "1:13:23"}
|
| 121 |
+
{"current_steps": 121, "total_steps": 234, "loss": 0.6797, "lr": 5.596971122701221e-06, "epoch": 1.5479744136460556, "percentage": 51.71, "elapsed_time": "1:17:51", "remaining_time": "1:12:42"}
|
| 122 |
+
{"current_steps": 122, "total_steps": 234, "loss": 0.736, "lr": 5.522642316338268e-06, "epoch": 1.560767590618337, "percentage": 52.14, "elapsed_time": "1:18:29", "remaining_time": "1:12:03"}
|
| 123 |
+
{"current_steps": 123, "total_steps": 234, "loss": 0.7289, "lr": 5.448196544517168e-06, "epoch": 1.5735607675906182, "percentage": 52.56, "elapsed_time": "1:19:06", "remaining_time": "1:11:23"}
|
| 124 |
+
{"current_steps": 124, "total_steps": 234, "loss": 0.7216, "lr": 5.373650467932122e-06, "epoch": 1.5863539445628998, "percentage": 52.99, "elapsed_time": "1:19:49", "remaining_time": "1:10:48"}
|
| 125 |
+
{"current_steps": 125, "total_steps": 234, "loss": 0.6882, "lr": 5.299020769725172e-06, "epoch": 1.5991471215351813, "percentage": 53.42, "elapsed_time": "1:20:29", "remaining_time": "1:10:11"}
|
| 126 |
+
{"current_steps": 126, "total_steps": 234, "loss": 0.7057, "lr": 5.224324151752575e-06, "epoch": 1.6119402985074627, "percentage": 53.85, "elapsed_time": "1:21:09", "remaining_time": "1:09:34"}
|
| 127 |
+
{"current_steps": 127, "total_steps": 234, "loss": 0.6429, "lr": 5.1495773308469935e-06, "epoch": 1.624733475479744, "percentage": 54.27, "elapsed_time": "1:21:45", "remaining_time": "1:08:53"}
|
| 128 |
+
{"current_steps": 128, "total_steps": 234, "loss": 0.7052, "lr": 5.074797035076319e-06, "epoch": 1.6375266524520256, "percentage": 54.7, "elapsed_time": "1:22:20", "remaining_time": "1:08:11"}
|
| 129 |
+
{"current_steps": 129, "total_steps": 234, "loss": 0.773, "lr": 5e-06, "epoch": 1.650319829424307, "percentage": 55.13, "elapsed_time": "1:22:59", "remaining_time": "1:07:33"}
|
| 130 |
+
{"current_steps": 130, "total_steps": 234, "loss": 0.7179, "lr": 4.9252029649236835e-06, "epoch": 1.6631130063965884, "percentage": 55.56, "elapsed_time": "1:23:36", "remaining_time": "1:06:53"}
|
| 131 |
+
{"current_steps": 131, "total_steps": 234, "loss": 0.6379, "lr": 4.850422669153009e-06, "epoch": 1.67590618336887, "percentage": 55.98, "elapsed_time": "1:24:14", "remaining_time": "1:06:13"}
|
| 132 |
+
{"current_steps": 132, "total_steps": 234, "loss": 0.7232, "lr": 4.775675848247427e-06, "epoch": 1.6886993603411513, "percentage": 56.41, "elapsed_time": "1:24:50", "remaining_time": "1:05:33"}
|
| 133 |
+
{"current_steps": 133, "total_steps": 234, "loss": 0.663, "lr": 4.700979230274829e-06, "epoch": 1.7014925373134329, "percentage": 56.84, "elapsed_time": "1:25:23", "remaining_time": "1:04:50"}
|
| 134 |
+
{"current_steps": 134, "total_steps": 234, "loss": 0.7215, "lr": 4.626349532067879e-06, "epoch": 1.7142857142857144, "percentage": 57.26, "elapsed_time": "1:26:03", "remaining_time": "1:04:13"}
|
| 135 |
+
{"current_steps": 135, "total_steps": 234, "loss": 0.788, "lr": 4.551803455482833e-06, "epoch": 1.7270788912579957, "percentage": 57.69, "elapsed_time": "1:26:44", "remaining_time": "1:03:36"}
|
| 136 |
+
{"current_steps": 136, "total_steps": 234, "loss": 0.745, "lr": 4.477357683661734e-06, "epoch": 1.739872068230277, "percentage": 58.12, "elapsed_time": "1:27:24", "remaining_time": "1:02:59"}
|
| 137 |
+
{"current_steps": 137, "total_steps": 234, "loss": 0.7129, "lr": 4.4030288772987795e-06, "epoch": 1.7526652452025586, "percentage": 58.55, "elapsed_time": "1:28:02", "remaining_time": "1:02:19"}
|
| 138 |
+
{"current_steps": 138, "total_steps": 234, "loss": 0.7328, "lr": 4.3288336709117246e-06, "epoch": 1.7654584221748402, "percentage": 58.97, "elapsed_time": "1:28:43", "remaining_time": "1:01:43"}
|
| 139 |
+
{"current_steps": 139, "total_steps": 234, "loss": 0.7738, "lr": 4.254788669119127e-06, "epoch": 1.7782515991471215, "percentage": 59.4, "elapsed_time": "1:29:17", "remaining_time": "1:01:01"}
|
| 140 |
+
{"current_steps": 140, "total_steps": 234, "loss": 0.6317, "lr": 4.180910442924312e-06, "epoch": 1.7910447761194028, "percentage": 59.83, "elapsed_time": "1:29:59", "remaining_time": "1:00:25"}
|
| 141 |
+
{"current_steps": 141, "total_steps": 234, "loss": 0.7288, "lr": 4.107215526006818e-06, "epoch": 1.8038379530916844, "percentage": 60.26, "elapsed_time": "1:30:35", "remaining_time": "0:59:45"}
|
| 142 |
+
{"current_steps": 142, "total_steps": 234, "loss": 0.776, "lr": 4.033720411022235e-06, "epoch": 1.816631130063966, "percentage": 60.68, "elapsed_time": "1:31:11", "remaining_time": "0:59:05"}
|
| 143 |
+
{"current_steps": 143, "total_steps": 234, "loss": 0.683, "lr": 3.960441545911205e-06, "epoch": 1.8294243070362475, "percentage": 61.11, "elapsed_time": "1:31:47", "remaining_time": "0:58:25"}
|
| 144 |
+
{"current_steps": 144, "total_steps": 234, "loss": 0.7194, "lr": 3.887395330218429e-06, "epoch": 1.8422174840085288, "percentage": 61.54, "elapsed_time": "1:32:27", "remaining_time": "0:57:47"}
|
| 145 |
+
{"current_steps": 145, "total_steps": 234, "loss": 0.7744, "lr": 3.8145981114225135e-06, "epoch": 1.8550106609808101, "percentage": 61.97, "elapsed_time": "1:33:04", "remaining_time": "0:57:07"}
|
| 146 |
+
{"current_steps": 146, "total_steps": 234, "loss": 0.7245, "lr": 3.7420661812774577e-06, "epoch": 1.8678038379530917, "percentage": 62.39, "elapsed_time": "1:33:43", "remaining_time": "0:56:29"}
|
| 147 |
+
{"current_steps": 147, "total_steps": 234, "loss": 0.6878, "lr": 3.669815772166625e-06, "epoch": 1.8805970149253732, "percentage": 62.82, "elapsed_time": "1:34:20", "remaining_time": "0:55:50"}
|
| 148 |
+
{"current_steps": 148, "total_steps": 234, "loss": 0.7315, "lr": 3.5978630534699873e-06, "epoch": 1.8933901918976546, "percentage": 63.25, "elapsed_time": "1:35:00", "remaining_time": "0:55:12"}
|
| 149 |
+
{"current_steps": 149, "total_steps": 234, "loss": 0.6943, "lr": 3.526224127945479e-06, "epoch": 1.906183368869936, "percentage": 63.68, "elapsed_time": "1:35:38", "remaining_time": "0:54:33"}
|
| 150 |
+
{"current_steps": 150, "total_steps": 234, "loss": 0.6904, "lr": 3.4549150281252635e-06, "epoch": 1.9189765458422174, "percentage": 64.1, "elapsed_time": "1:36:16", "remaining_time": "0:53:54"}
|
| 151 |
+
{"current_steps": 151, "total_steps": 234, "loss": 0.7495, "lr": 3.383951712727701e-06, "epoch": 1.931769722814499, "percentage": 64.53, "elapsed_time": "1:36:52", "remaining_time": "0:53:14"}
|
| 152 |
+
{"current_steps": 152, "total_steps": 234, "loss": 0.7436, "lr": 3.3133500630858507e-06, "epoch": 1.9445628997867805, "percentage": 64.96, "elapsed_time": "1:37:32", "remaining_time": "0:52:37"}
|
| 153 |
+
{"current_steps": 153, "total_steps": 234, "loss": 0.7042, "lr": 3.2431258795932863e-06, "epoch": 1.9573560767590619, "percentage": 65.38, "elapsed_time": "1:38:14", "remaining_time": "0:52:00"}
|
| 154 |
+
{"current_steps": 154, "total_steps": 234, "loss": 0.7379, "lr": 3.173294878168025e-06, "epoch": 1.9701492537313432, "percentage": 65.81, "elapsed_time": "1:38:50", "remaining_time": "0:51:20"}
|
| 155 |
+
{"current_steps": 155, "total_steps": 234, "loss": 0.6986, "lr": 3.1038726867353587e-06, "epoch": 1.9829424307036247, "percentage": 66.24, "elapsed_time": "1:39:25", "remaining_time": "0:50:40"}
|
| 156 |
+
{"current_steps": 156, "total_steps": 234, "loss": 0.7317, "lr": 3.0348748417303826e-06, "epoch": 1.9957356076759063, "percentage": 66.67, "elapsed_time": "1:40:08", "remaining_time": "0:50:04"}
|
| 157 |
+
{"current_steps": 157, "total_steps": 234, "loss": 1.2703, "lr": 2.966316784621e-06, "epoch": 2.008528784648188, "percentage": 67.09, "elapsed_time": "1:41:43", "remaining_time": "0:49:53"}
|
| 158 |
+
{"current_steps": 158, "total_steps": 234, "loss": 0.7064, "lr": 2.8982138584521734e-06, "epoch": 2.021321961620469, "percentage": 67.52, "elapsed_time": "1:42:21", "remaining_time": "0:49:14"}
|
| 159 |
+
{"current_steps": 159, "total_steps": 234, "loss": 0.7061, "lr": 2.83058130441221e-06, "epoch": 2.0341151385927505, "percentage": 67.95, "elapsed_time": "1:43:01", "remaining_time": "0:48:35"}
|
training_args.bin
ADDED
|
@@ -0,0 +1,3 @@
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:fd829d3bf9887a0975fbdbd5e9f7a335b310b8ee3d9f424e863dcbf18e2692ee
|
| 3 |
+
size 7416
|
vocab.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
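A minimal sketch for working with the trainer_log.jsonl entries added above: each added line is a standalone JSON object with the fields current_steps, total_steps, loss, lr, epoch, percentage, elapsed_time and remaining_time. The script below assumes the file has been downloaded locally as trainer_log.jsonl; the warmup length (24 steps) and peak learning rate (1e-5) used to reconstruct the schedule are inferred from the logged lr column, not read from the training configuration.

import json
import math

# Read one JSON object per line from the training log.
records = []
with open("trainer_log.jsonl") as f:
    for line in f:
        line = line.strip()
        if line:
            records.append(json.loads(line))

# The logged lr values are consistent with linear warmup over the first
# 24 steps to a peak of 1e-5, followed by cosine decay over the remaining
# 210 of the 234 total steps (e.g. step 129 sits at exactly half the peak).
PEAK_LR, WARMUP_STEPS, TOTAL_STEPS = 1e-5, 24, 234

def expected_lr(step: int) -> float:
    if step <= WARMUP_STEPS:
        return PEAK_LR * step / WARMUP_STEPS
    progress = (step - WARMUP_STEPS) / (TOTAL_STEPS - WARMUP_STEPS)
    return 0.5 * PEAK_LR * (1.0 + math.cos(math.pi * progress))

for rec in records:
    step = rec["current_steps"]
    print(f"step {step:3d}  loss={rec['loss']:.4f}  "
          f"lr={rec['lr']:.3e}  expected_lr={expected_lr(step):.3e}")

Printing the per-step loss next to the logged and reconstructed learning rates also makes the epoch boundaries easy to spot: the loss spikes at steps 79 and 157 in the log above coincide with the transitions into epochs 1 and 2.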