diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/config.json b/llava-v1.5-7b-concat-16/checkpoint-2000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..5b87d31c757ff1906899f1e3a1d047752a0c5005
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/config.json
@@ -0,0 +1,44 @@
+{
+ "_name_or_path": "lmsys/vicuna-7b-v1.5",
+ "architectures": [
+ "LlavaLlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 4096,
+ "mm_hidden_size": 1536,
+ "mm_patch_merge_type": "flat",
+ "mm_projector_lr": null,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "Leonardo6/clip-12m-16-roberta4",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.37.2",
+ "tune_mm_mlp_adapter": false,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vocab_size": 32000
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/generation_config.json b/llava-v1.5-7b-concat-16/checkpoint-2000/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f686e33d0dd24a8bc304bf932f5bc12717579f0b
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/generation_config.json
@@ -0,0 +1,11 @@
+{
+ "attn_implementation": "flash_attention_2",
+ "bos_token_id": 1,
+ "do_sample": true,
+ "eos_token_id": 2,
+ "max_length": 4096,
+ "pad_token_id": 0,
+ "temperature": 0.9,
+ "top_p": 0.6,
+ "transformers_version": "4.37.2"
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/latest b/llava-v1.5-7b-concat-16/checkpoint-2000/latest
new file mode 100644
index 0000000000000000000000000000000000000000..2a79fdc19587e6bc9de060e90633f3a151b04516
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/latest
@@ -0,0 +1 @@
+global_step2000
\ No newline at end of file
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/model.safetensors.index.json b/llava-v1.5-7b-concat-16/checkpoint-2000/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..51cd1fe67b08db18738439b039f9eec8e67fa02f
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/model.safetensors.index.json
@@ -0,0 +1,701 @@
+{
+ "metadata": {
+ "total_size": 13867362304
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00003-of-00003.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.mm_projector.0.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.0.weight": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.weight": "model-00003-of-00003.safetensors",
+ "model.norm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.embeddings.class_embedding": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.embeddings.patch_embedding.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.embeddings.position_embedding.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.post_layernorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.post_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.pre_layrnorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.pre_layrnorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.cls_token": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.patch_embeddings.projection.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.patch_embeddings.projection.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.position_embeddings": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.layernorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.pooler.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.pooler.dense.weight": "model-00003-of-00003.safetensors"
+ }
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/rng_state_0.pth b/llava-v1.5-7b-concat-16/checkpoint-2000/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..44bb770e4c85b7b758a6b2962384781d026daabd
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:af2966def51ea1ab87d97a757bd22e7f72001f21baee1a67abfc367e92e2e402
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/rng_state_1.pth b/llava-v1.5-7b-concat-16/checkpoint-2000/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..75cd02b1fceb1b3b1aae40cf4857ce2cea6fd436
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8cf5883933ffd2749908af2fffabf58c748ecc9afbc507bfa1868172477bbf0c
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/rng_state_2.pth b/llava-v1.5-7b-concat-16/checkpoint-2000/rng_state_2.pth
new file mode 100644
index 0000000000000000000000000000000000000000..127a0ab4fb3652fab0edcb4ecc63af17870be47c
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/rng_state_2.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b4f7265b828abac3132886540e39210cab53edc42ddf0389bd517ccd5c1ca42d
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/rng_state_3.pth b/llava-v1.5-7b-concat-16/checkpoint-2000/rng_state_3.pth
new file mode 100644
index 0000000000000000000000000000000000000000..a696a927c56c2b5ca8cb6f3d71f9ca36a1ae9fea
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/rng_state_3.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1ebdec8d90b17c1d6090b2bc79535cba013a72aa00b297c128236362564f916
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/scheduler.pt b/llava-v1.5-7b-concat-16/checkpoint-2000/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..c20523bf4d8855eee9b66d3fec7145a48e803e60
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:501e98e87214ddc97624a6af56cd9ac8d257522474c6d2dd9591ef941c84b343
+size 1064
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/special_tokens_map.json b/llava-v1.5-7b-concat-16/checkpoint-2000/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..14761dcf1466dc232bd41de9c21d4c617b15755e
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/special_tokens_map.json
@@ -0,0 +1,24 @@
+{
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<unk>",
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/tokenizer.model b/llava-v1.5-7b-concat-16/checkpoint-2000/tokenizer.model
new file mode 100644
index 0000000000000000000000000000000000000000..6c00c742ce03c627d6cd5b795984876fa49fa899
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/tokenizer.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/tokenizer_config.json b/llava-v1.5-7b-concat-16/checkpoint-2000/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..2d53c0f8edb049fa98763ee75652fafa68bf7f42
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/tokenizer_config.json
@@ -0,0 +1,42 @@
+{
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "</s>",
+ "legacy": false,
+ "model_max_length": 2048,
+ "pad_token": "<unk>",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": false
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/trainer_state.json b/llava-v1.5-7b-concat-16/checkpoint-2000/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..24002b9bf594a65ff138d09d1c534f246ca8e474
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/trainer_state.json
@@ -0,0 +1,12021 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 0.384781876773604,
+ "eval_steps": 500,
+ "global_step": 2000,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.282051282051282e-07,
+ "loss": 1.437,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.564102564102564e-07,
+ "loss": 1.4396,
+ "step": 2
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.846153846153847e-07,
+ "loss": 1.4201,
+ "step": 3
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 5.128205128205128e-07,
+ "loss": 1.4526,
+ "step": 4
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 6.41025641025641e-07,
+ "loss": 1.4033,
+ "step": 5
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 7.692307692307694e-07,
+ "loss": 1.4341,
+ "step": 6
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 8.974358974358975e-07,
+ "loss": 1.455,
+ "step": 7
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.0256410256410257e-06,
+ "loss": 1.4195,
+ "step": 8
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.153846153846154e-06,
+ "loss": 1.4367,
+ "step": 9
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.282051282051282e-06,
+ "loss": 1.3549,
+ "step": 10
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.4102564102564104e-06,
+ "loss": 1.3929,
+ "step": 11
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.5384615384615387e-06,
+ "loss": 1.3577,
+ "step": 12
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.6666666666666667e-06,
+ "loss": 1.3198,
+ "step": 13
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.794871794871795e-06,
+ "loss": 1.242,
+ "step": 14
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.9230769230769234e-06,
+ "loss": 1.2693,
+ "step": 15
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.0512820512820513e-06,
+ "loss": 1.3043,
+ "step": 16
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.1794871794871797e-06,
+ "loss": 1.2034,
+ "step": 17
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.307692307692308e-06,
+ "loss": 1.1896,
+ "step": 18
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.435897435897436e-06,
+ "loss": 1.2483,
+ "step": 19
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.564102564102564e-06,
+ "loss": 1.1324,
+ "step": 20
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.6923076923076923e-06,
+ "loss": 1.2191,
+ "step": 21
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.8205128205128207e-06,
+ "loss": 1.1962,
+ "step": 22
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.948717948717949e-06,
+ "loss": 1.125,
+ "step": 23
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.0769230769230774e-06,
+ "loss": 1.2311,
+ "step": 24
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.205128205128206e-06,
+ "loss": 1.1687,
+ "step": 25
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.3333333333333333e-06,
+ "loss": 1.1214,
+ "step": 26
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.4615384615384617e-06,
+ "loss": 1.1449,
+ "step": 27
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.58974358974359e-06,
+ "loss": 1.139,
+ "step": 28
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.7179487179487184e-06,
+ "loss": 1.0864,
+ "step": 29
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.846153846153847e-06,
+ "loss": 1.1032,
+ "step": 30
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.974358974358974e-06,
+ "loss": 1.1475,
+ "step": 31
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.102564102564103e-06,
+ "loss": 1.0742,
+ "step": 32
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.230769230769231e-06,
+ "loss": 1.1101,
+ "step": 33
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.358974358974359e-06,
+ "loss": 1.0727,
+ "step": 34
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.487179487179488e-06,
+ "loss": 1.0478,
+ "step": 35
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.615384615384616e-06,
+ "loss": 1.099,
+ "step": 36
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.743589743589744e-06,
+ "loss": 0.3001,
+ "step": 37
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.871794871794872e-06,
+ "loss": 1.095,
+ "step": 38
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5e-06,
+ "loss": 1.0828,
+ "step": 39
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.128205128205128e-06,
+ "loss": 1.0715,
+ "step": 40
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.256410256410257e-06,
+ "loss": 1.0794,
+ "step": 41
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.384615384615385e-06,
+ "loss": 1.1222,
+ "step": 42
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.512820512820514e-06,
+ "loss": 1.0315,
+ "step": 43
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.641025641025641e-06,
+ "loss": 1.0473,
+ "step": 44
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.769230769230769e-06,
+ "loss": 1.1067,
+ "step": 45
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.897435897435898e-06,
+ "loss": 1.0335,
+ "step": 46
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.025641025641026e-06,
+ "loss": 1.0009,
+ "step": 47
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.153846153846155e-06,
+ "loss": 1.0086,
+ "step": 48
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.282051282051282e-06,
+ "loss": 1.0027,
+ "step": 49
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.410256410256412e-06,
+ "loss": 1.0066,
+ "step": 50
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.538461538461539e-06,
+ "loss": 1.0375,
+ "step": 51
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.666666666666667e-06,
+ "loss": 1.0491,
+ "step": 52
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.794871794871796e-06,
+ "loss": 1.0522,
+ "step": 53
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.923076923076923e-06,
+ "loss": 0.9977,
+ "step": 54
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.051282051282053e-06,
+ "loss": 1.0516,
+ "step": 55
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.17948717948718e-06,
+ "loss": 0.3065,
+ "step": 56
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.307692307692308e-06,
+ "loss": 1.057,
+ "step": 57
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.435897435897437e-06,
+ "loss": 1.0589,
+ "step": 58
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.564102564102564e-06,
+ "loss": 1.0796,
+ "step": 59
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.692307692307694e-06,
+ "loss": 1.0433,
+ "step": 60
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.820512820512822e-06,
+ "loss": 0.9848,
+ "step": 61
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.948717948717949e-06,
+ "loss": 1.0166,
+ "step": 62
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.076923076923077e-06,
+ "loss": 0.9902,
+ "step": 63
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.205128205128205e-06,
+ "loss": 1.0357,
+ "step": 64
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.333333333333334e-06,
+ "loss": 0.9981,
+ "step": 65
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.461538461538462e-06,
+ "loss": 0.9887,
+ "step": 66
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.58974358974359e-06,
+ "loss": 0.9445,
+ "step": 67
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.717948717948719e-06,
+ "loss": 1.0034,
+ "step": 68
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.846153846153847e-06,
+ "loss": 0.9865,
+ "step": 69
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.974358974358976e-06,
+ "loss": 1.0095,
+ "step": 70
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.102564102564104e-06,
+ "loss": 0.988,
+ "step": 71
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.230769230769232e-06,
+ "loss": 0.9673,
+ "step": 72
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.358974358974359e-06,
+ "loss": 1.0383,
+ "step": 73
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.487179487179487e-06,
+ "loss": 0.9842,
+ "step": 74
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.615384615384616e-06,
+ "loss": 0.9988,
+ "step": 75
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.743589743589744e-06,
+ "loss": 0.9715,
+ "step": 76
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.871794871794872e-06,
+ "loss": 0.9306,
+ "step": 77
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1e-05,
+ "loss": 1.0179,
+ "step": 78
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.012820512820513e-05,
+ "loss": 1.0813,
+ "step": 79
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0256410256410256e-05,
+ "loss": 0.9825,
+ "step": 80
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0384615384615386e-05,
+ "loss": 1.0437,
+ "step": 81
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0512820512820514e-05,
+ "loss": 1.0863,
+ "step": 82
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0641025641025643e-05,
+ "loss": 1.0367,
+ "step": 83
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.076923076923077e-05,
+ "loss": 0.9808,
+ "step": 84
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0897435897435898e-05,
+ "loss": 0.9815,
+ "step": 85
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1025641025641028e-05,
+ "loss": 1.0001,
+ "step": 86
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1153846153846154e-05,
+ "loss": 1.0443,
+ "step": 87
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1282051282051283e-05,
+ "loss": 1.0108,
+ "step": 88
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1410256410256411e-05,
+ "loss": 0.2945,
+ "step": 89
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1538461538461538e-05,
+ "loss": 0.9728,
+ "step": 90
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1666666666666668e-05,
+ "loss": 1.0072,
+ "step": 91
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1794871794871796e-05,
+ "loss": 1.0504,
+ "step": 92
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1923076923076925e-05,
+ "loss": 1.0167,
+ "step": 93
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2051282051282051e-05,
+ "loss": 0.9801,
+ "step": 94
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.217948717948718e-05,
+ "loss": 0.307,
+ "step": 95
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.230769230769231e-05,
+ "loss": 0.9832,
+ "step": 96
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2435897435897436e-05,
+ "loss": 1.0245,
+ "step": 97
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2564102564102565e-05,
+ "loss": 1.016,
+ "step": 98
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2692307692307693e-05,
+ "loss": 0.9245,
+ "step": 99
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2820512820512823e-05,
+ "loss": 0.2833,
+ "step": 100
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.294871794871795e-05,
+ "loss": 0.9694,
+ "step": 101
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3076923076923078e-05,
+ "loss": 1.0847,
+ "step": 102
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3205128205128207e-05,
+ "loss": 0.9805,
+ "step": 103
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3333333333333333e-05,
+ "loss": 1.0026,
+ "step": 104
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3461538461538463e-05,
+ "loss": 0.2628,
+ "step": 105
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3589743589743592e-05,
+ "loss": 0.9652,
+ "step": 106
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3717948717948718e-05,
+ "loss": 1.0551,
+ "step": 107
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3846153846153847e-05,
+ "loss": 0.9897,
+ "step": 108
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3974358974358975e-05,
+ "loss": 1.0074,
+ "step": 109
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4102564102564105e-05,
+ "loss": 0.9967,
+ "step": 110
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4230769230769232e-05,
+ "loss": 0.9988,
+ "step": 111
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.435897435897436e-05,
+ "loss": 1.07,
+ "step": 112
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4487179487179489e-05,
+ "loss": 0.9754,
+ "step": 113
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4615384615384615e-05,
+ "loss": 1.022,
+ "step": 114
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4743589743589745e-05,
+ "loss": 0.9851,
+ "step": 115
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4871794871794874e-05,
+ "loss": 0.2876,
+ "step": 116
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5000000000000002e-05,
+ "loss": 1.0329,
+ "step": 117
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5128205128205129e-05,
+ "loss": 0.9843,
+ "step": 118
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5256410256410257e-05,
+ "loss": 1.0554,
+ "step": 119
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5384615384615387e-05,
+ "loss": 0.9708,
+ "step": 120
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5512820512820516e-05,
+ "loss": 1.0252,
+ "step": 121
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5641025641025644e-05,
+ "loss": 1.0464,
+ "step": 122
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.576923076923077e-05,
+ "loss": 0.9278,
+ "step": 123
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5897435897435897e-05,
+ "loss": 1.0028,
+ "step": 124
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.602564102564103e-05,
+ "loss": 1.0319,
+ "step": 125
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6153846153846154e-05,
+ "loss": 0.9877,
+ "step": 126
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6282051282051282e-05,
+ "loss": 1.067,
+ "step": 127
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.641025641025641e-05,
+ "loss": 0.8855,
+ "step": 128
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.653846153846154e-05,
+ "loss": 0.9933,
+ "step": 129
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6666666666666667e-05,
+ "loss": 1.0038,
+ "step": 130
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6794871794871796e-05,
+ "loss": 1.0337,
+ "step": 131
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6923076923076924e-05,
+ "loss": 0.9185,
+ "step": 132
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7051282051282053e-05,
+ "loss": 0.9293,
+ "step": 133
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.717948717948718e-05,
+ "loss": 0.9535,
+ "step": 134
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.730769230769231e-05,
+ "loss": 0.9931,
+ "step": 135
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7435897435897438e-05,
+ "loss": 0.934,
+ "step": 136
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7564102564102566e-05,
+ "loss": 0.9966,
+ "step": 137
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7692307692307694e-05,
+ "loss": 1.018,
+ "step": 138
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7820512820512823e-05,
+ "loss": 0.9646,
+ "step": 139
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.794871794871795e-05,
+ "loss": 1.0316,
+ "step": 140
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.807692307692308e-05,
+ "loss": 1.0237,
+ "step": 141
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8205128205128208e-05,
+ "loss": 1.0058,
+ "step": 142
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8333333333333333e-05,
+ "loss": 1.0256,
+ "step": 143
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8461538461538465e-05,
+ "loss": 0.9973,
+ "step": 144
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8589743589743593e-05,
+ "loss": 0.9952,
+ "step": 145
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8717948717948718e-05,
+ "loss": 0.9886,
+ "step": 146
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8846153846153846e-05,
+ "loss": 0.9792,
+ "step": 147
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8974358974358975e-05,
+ "loss": 0.9918,
+ "step": 148
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9102564102564106e-05,
+ "loss": 0.9943,
+ "step": 149
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.923076923076923e-05,
+ "loss": 1.0113,
+ "step": 150
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.935897435897436e-05,
+ "loss": 1.0034,
+ "step": 151
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9487179487179488e-05,
+ "loss": 0.2836,
+ "step": 152
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9615384615384617e-05,
+ "loss": 0.9962,
+ "step": 153
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9743589743589745e-05,
+ "loss": 0.9848,
+ "step": 154
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9871794871794873e-05,
+ "loss": 0.9459,
+ "step": 155
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 2e-05,
+ "loss": 1.06,
+ "step": 156
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999998058057616e-05,
+ "loss": 1.0001,
+ "step": 157
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999992232231216e-05,
+ "loss": 1.0235,
+ "step": 158
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999998252252306e-05,
+ "loss": 0.9819,
+ "step": 159
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999968928936924e-05,
+ "loss": 0.9859,
+ "step": 160
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999995145147809e-05,
+ "loss": 0.9607,
+ "step": 161
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999930090153335e-05,
+ "loss": 0.9999,
+ "step": 162
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999904844970963e-05,
+ "loss": 0.9986,
+ "step": 163
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999987571594078e-05,
+ "loss": 0.337,
+ "step": 164
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.99998427030741e-05,
+ "loss": 0.9978,
+ "step": 165
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999980580638374e-05,
+ "loss": 1.0083,
+ "step": 166
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999976502588403e-05,
+ "loss": 0.9703,
+ "step": 167
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999720361590812e-05,
+ "loss": 0.9653,
+ "step": 168
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999671813521435e-05,
+ "loss": 0.9899,
+ "step": 169
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999961938169475e-05,
+ "loss": 0.9462,
+ "step": 170
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999563066131124e-05,
+ "loss": 0.8944,
+ "step": 171
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999502866852427e-05,
+ "loss": 1.0217,
+ "step": 172
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999943878388204e-05,
+ "loss": 0.9505,
+ "step": 173
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999370817244853e-05,
+ "loss": 0.9858,
+ "step": 174
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999298966967264e-05,
+ "loss": 1.0156,
+ "step": 175
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999223233077178e-05,
+ "loss": 1.001,
+ "step": 176
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999914361560401e-05,
+ "loss": 0.9823,
+ "step": 177
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999060114578682e-05,
+ "loss": 0.9295,
+ "step": 178
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998972730033624e-05,
+ "loss": 0.9641,
+ "step": 179
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998881462002778e-05,
+ "loss": 0.2889,
+ "step": 180
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998786310521585e-05,
+ "loss": 0.9556,
+ "step": 181
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998687275627008e-05,
+ "loss": 1.0336,
+ "step": 182
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998584357357503e-05,
+ "loss": 0.9954,
+ "step": 183
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998477555753054e-05,
+ "loss": 0.958,
+ "step": 184
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998366870855134e-05,
+ "loss": 1.0338,
+ "step": 185
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999825230270673e-05,
+ "loss": 0.982,
+ "step": 186
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998133851352342e-05,
+ "loss": 0.3328,
+ "step": 187
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998011516837974e-05,
+ "loss": 0.9857,
+ "step": 188
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999788529921114e-05,
+ "loss": 0.917,
+ "step": 189
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999775519852086e-05,
+ "loss": 0.2945,
+ "step": 190
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999762121481767e-05,
+ "loss": 0.9773,
+ "step": 191
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.99974833481536e-05,
+ "loss": 0.9617,
+ "step": 192
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997341598582197e-05,
+ "loss": 1.0578,
+ "step": 193
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997195966158518e-05,
+ "loss": 0.9984,
+ "step": 194
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997046450939122e-05,
+ "loss": 0.9619,
+ "step": 195
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996893052982083e-05,
+ "loss": 1.0214,
+ "step": 196
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996735772346973e-05,
+ "loss": 0.9952,
+ "step": 197
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996574609094887e-05,
+ "loss": 1.0151,
+ "step": 198
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996409563288404e-05,
+ "loss": 0.9638,
+ "step": 199
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996240634991645e-05,
+ "loss": 0.9891,
+ "step": 200
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996067824270204e-05,
+ "loss": 1.0223,
+ "step": 201
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999589113119121e-05,
+ "loss": 1.0309,
+ "step": 202
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995710555823277e-05,
+ "loss": 1.0079,
+ "step": 203
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999552609823655e-05,
+ "loss": 0.9522,
+ "step": 204
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999533775850266e-05,
+ "loss": 0.3102,
+ "step": 205
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995145536694764e-05,
+ "loss": 0.9981,
+ "step": 206
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994949432887512e-05,
+ "loss": 0.9842,
+ "step": 207
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999474944715708e-05,
+ "loss": 0.9885,
+ "step": 208
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994545579581125e-05,
+ "loss": 1.0181,
+ "step": 209
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994337830238836e-05,
+ "loss": 0.9843,
+ "step": 210
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994126199210897e-05,
+ "loss": 0.9757,
+ "step": 211
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999391068657951e-05,
+ "loss": 0.9023,
+ "step": 212
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993691292428364e-05,
+ "loss": 0.9472,
+ "step": 213
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993468016842684e-05,
+ "loss": 0.9836,
+ "step": 214
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999324085990918e-05,
+ "loss": 0.9871,
+ "step": 215
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993009821716076e-05,
+ "loss": 1.0082,
+ "step": 216
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992774902353104e-05,
+ "loss": 0.2744,
+ "step": 217
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999253610191151e-05,
+ "loss": 0.3193,
+ "step": 218
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999229342048404e-05,
+ "loss": 1.0274,
+ "step": 219
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992046858164942e-05,
+ "loss": 0.2843,
+ "step": 220
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999179641504999e-05,
+ "loss": 0.982,
+ "step": 221
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991542091236438e-05,
+ "loss": 0.976,
+ "step": 222
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991283886823075e-05,
+ "loss": 1.0374,
+ "step": 223
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991021801910177e-05,
+ "loss": 1.0289,
+ "step": 224
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999075583659954e-05,
+ "loss": 0.9761,
+ "step": 225
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999048599099446e-05,
+ "loss": 0.2977,
+ "step": 226
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990212265199738e-05,
+ "loss": 0.9407,
+ "step": 227
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.998993465932169e-05,
+ "loss": 1.0007,
+ "step": 228
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989653173468137e-05,
+ "loss": 0.9877,
+ "step": 229
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.99893678077484e-05,
+ "loss": 0.9289,
+ "step": 230
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989078562273313e-05,
+ "loss": 0.9585,
+ "step": 231
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9988785437155222e-05,
+ "loss": 0.9449,
+ "step": 232
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9988488432507963e-05,
+ "loss": 1.0345,
+ "step": 233
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9988187548446895e-05,
+ "loss": 0.965,
+ "step": 234
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998788278508888e-05,
+ "loss": 0.9971,
+ "step": 235
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987574142552274e-05,
+ "loss": 0.9898,
+ "step": 236
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987261620956964e-05,
+ "loss": 0.9984,
+ "step": 237
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9986945220424326e-05,
+ "loss": 1.0083,
+ "step": 238
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998662494107724e-05,
+ "loss": 0.9371,
+ "step": 239
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.99863007830401e-05,
+ "loss": 1.024,
+ "step": 240
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985972746438815e-05,
+ "loss": 1.0131,
+ "step": 241
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985640831400778e-05,
+ "loss": 0.923,
+ "step": 242
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998530503805491e-05,
+ "loss": 0.9833,
+ "step": 243
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984965366531624e-05,
+ "loss": 0.981,
+ "step": 244
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984621816962843e-05,
+ "loss": 0.9922,
+ "step": 245
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984274389482005e-05,
+ "loss": 1.037,
+ "step": 246
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983923084224047e-05,
+ "loss": 0.9879,
+ "step": 247
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983567901325404e-05,
+ "loss": 0.9919,
+ "step": 248
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983208840924028e-05,
+ "loss": 0.9303,
+ "step": 249
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998284590315937e-05,
+ "loss": 0.9406,
+ "step": 250
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982479088172403e-05,
+ "loss": 0.3251,
+ "step": 251
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982108396105584e-05,
+ "loss": 0.9975,
+ "step": 252
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9981733827102884e-05,
+ "loss": 0.9497,
+ "step": 253
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998135538130979e-05,
+ "loss": 0.9562,
+ "step": 254
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998097305887328e-05,
+ "loss": 1.0052,
+ "step": 255
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9980586859941846e-05,
+ "loss": 0.9342,
+ "step": 256
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998019678466548e-05,
+ "loss": 0.9237,
+ "step": 257
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997980283319568e-05,
+ "loss": 0.9744,
+ "step": 258
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979405005685466e-05,
+ "loss": 1.0382,
+ "step": 259
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979003302289336e-05,
+ "loss": 0.9797,
+ "step": 260
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997859772316331e-05,
+ "loss": 0.9955,
+ "step": 261
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9978188268464912e-05,
+ "loss": 0.8648,
+ "step": 262
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997777493835317e-05,
+ "loss": 0.9995,
+ "step": 263
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9977357732988616e-05,
+ "loss": 0.9618,
+ "step": 264
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976936652533288e-05,
+ "loss": 0.9682,
+ "step": 265
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997651169715073e-05,
+ "loss": 0.9777,
+ "step": 266
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976082867005985e-05,
+ "loss": 0.9652,
+ "step": 267
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997565016226561e-05,
+ "loss": 1.0588,
+ "step": 268
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997521358309766e-05,
+ "loss": 0.8892,
+ "step": 269
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.99747731296717e-05,
+ "loss": 0.9918,
+ "step": 270
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9974328802158798e-05,
+ "loss": 0.9888,
+ "step": 271
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997388060073152e-05,
+ "loss": 1.022,
+ "step": 272
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9973428525563948e-05,
+ "loss": 0.9987,
+ "step": 273
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972972576831656e-05,
+ "loss": 0.9734,
+ "step": 274
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972512754711738e-05,
+ "loss": 1.049,
+ "step": 275
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997204905938278e-05,
+ "loss": 0.9298,
+ "step": 276
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9971581491024873e-05,
+ "loss": 1.0159,
+ "step": 277
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997111004981962e-05,
+ "loss": 0.9998,
+ "step": 278
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9970634735950117e-05,
+ "loss": 1.0013,
+ "step": 279
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9970155549600978e-05,
+ "loss": 0.9775,
+ "step": 280
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9969672490958304e-05,
+ "loss": 0.9639,
+ "step": 281
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.996918556020972e-05,
+ "loss": 0.9604,
+ "step": 282
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.996869475754434e-05,
+ "loss": 0.9464,
+ "step": 283
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9968200083152784e-05,
+ "loss": 0.9963,
+ "step": 284
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9967701537227175e-05,
+ "loss": 0.9389,
+ "step": 285
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996719911996115e-05,
+ "loss": 0.9534,
+ "step": 286
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996669283154984e-05,
+ "loss": 0.8961,
+ "step": 287
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996618267218988e-05,
+ "loss": 0.9537,
+ "step": 288
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996566864207941e-05,
+ "loss": 0.9773,
+ "step": 289
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9965150741418072e-05,
+ "loss": 0.9985,
+ "step": 290
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9964628970407018e-05,
+ "loss": 1.0109,
+ "step": 291
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9964103329248892e-05,
+ "loss": 0.9856,
+ "step": 292
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996357381814785e-05,
+ "loss": 0.9531,
+ "step": 293
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996304043730955e-05,
+ "loss": 1.0416,
+ "step": 294
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9962503186941143e-05,
+ "loss": 0.902,
+ "step": 295
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9961962067251298e-05,
+ "loss": 1.0041,
+ "step": 296
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9961417078450177e-05,
+ "loss": 0.3216,
+ "step": 297
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996086822074945e-05,
+ "loss": 0.9695,
+ "step": 298
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9960315494362286e-05,
+ "loss": 1.0055,
+ "step": 299
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9959758899503355e-05,
+ "loss": 0.9274,
+ "step": 300
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995919843638883e-05,
+ "loss": 1.0085,
+ "step": 301
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9958634105236395e-05,
+ "loss": 1.024,
+ "step": 302
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9958065906265228e-05,
+ "loss": 0.9575,
+ "step": 303
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9957493839696013e-05,
+ "loss": 0.931,
+ "step": 304
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9956917905750926e-05,
+ "loss": 1.014,
+ "step": 305
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995633810465366e-05,
+ "loss": 0.9083,
+ "step": 306
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.99557544366294e-05,
+ "loss": 1.0107,
+ "step": 307
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9955166901904838e-05,
+ "loss": 0.9126,
+ "step": 308
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9954575500708164e-05,
+ "loss": 0.9656,
+ "step": 309
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995398023326907e-05,
+ "loss": 0.95,
+ "step": 310
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9953381099818756e-05,
+ "loss": 0.9424,
+ "step": 311
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9952778100589912e-05,
+ "loss": 0.8988,
+ "step": 312
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9952171235816747e-05,
+ "loss": 1.0329,
+ "step": 313
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9951560505734948e-05,
+ "loss": 1.0457,
+ "step": 314
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9950945910581718e-05,
+ "loss": 0.8971,
+ "step": 315
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9950327450595766e-05,
+ "loss": 0.9726,
+ "step": 316
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949705126017286e-05,
+ "loss": 0.9883,
+ "step": 317
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949078937087988e-05,
+ "loss": 0.987,
+ "step": 318
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994844888405107e-05,
+ "loss": 0.9479,
+ "step": 319
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9947814967151246e-05,
+ "loss": 0.9239,
+ "step": 320
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9947177186634716e-05,
+ "loss": 0.9383,
+ "step": 321
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9946535542749187e-05,
+ "loss": 0.2709,
+ "step": 322
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9945890035743866e-05,
+ "loss": 1.053,
+ "step": 323
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9945240665869465e-05,
+ "loss": 0.9982,
+ "step": 324
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9944587433378187e-05,
+ "loss": 1.0055,
+ "step": 325
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994393033852374e-05,
+ "loss": 0.9182,
+ "step": 326
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9943269381561334e-05,
+ "loss": 1.0582,
+ "step": 327
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994260456274768e-05,
+ "loss": 0.9732,
+ "step": 328
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9941935882340976e-05,
+ "loss": 0.9413,
+ "step": 329
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994126334060094e-05,
+ "loss": 0.9014,
+ "step": 330
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994058693778878e-05,
+ "loss": 0.9939,
+ "step": 331
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9939906674167192e-05,
+ "loss": 0.9712,
+ "step": 332
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993922255000039e-05,
+ "loss": 0.9642,
+ "step": 333
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993853456555408e-05,
+ "loss": 0.9423,
+ "step": 334
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9937842721095468e-05,
+ "loss": 1.0095,
+ "step": 335
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9937147016893257e-05,
+ "loss": 0.9853,
+ "step": 336
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9936447453217646e-05,
+ "loss": 0.9414,
+ "step": 337
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9935744030340347e-05,
+ "loss": 0.9975,
+ "step": 338
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9935036748534555e-05,
+ "loss": 1.0131,
+ "step": 339
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993432560807497e-05,
+ "loss": 0.9472,
+ "step": 340
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993361060923779e-05,
+ "loss": 0.9754,
+ "step": 341
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9932891752300717e-05,
+ "loss": 0.9018,
+ "step": 342
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9932169037542947e-05,
+ "loss": 0.9971,
+ "step": 343
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9931442465245164e-05,
+ "loss": 0.9472,
+ "step": 344
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9930712035689576e-05,
+ "loss": 0.9566,
+ "step": 345
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992997774915986e-05,
+ "loss": 0.9609,
+ "step": 346
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992923960594121e-05,
+ "loss": 1.0305,
+ "step": 347
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9928497606320308e-05,
+ "loss": 0.9794,
+ "step": 348
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992775175058535e-05,
+ "loss": 0.9911,
+ "step": 349
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9927002039026002e-05,
+ "loss": 0.8958,
+ "step": 350
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9926248471933453e-05,
+ "loss": 0.998,
+ "step": 351
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9925491049600382e-05,
+ "loss": 0.3126,
+ "step": 352
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9924729772320953e-05,
+ "loss": 0.9474,
+ "step": 353
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9923964640390846e-05,
+ "loss": 1.0056,
+ "step": 354
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9923195654107227e-05,
+ "loss": 0.952,
+ "step": 355
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992242281376876e-05,
+ "loss": 0.9329,
+ "step": 356
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9921646119675606e-05,
+ "loss": 0.9567,
+ "step": 357
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9920865572129426e-05,
+ "loss": 1.0107,
+ "step": 358
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9920081171433377e-05,
+ "loss": 1.0179,
+ "step": 359
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991929291789211e-05,
+ "loss": 1.0085,
+ "step": 360
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9918500811811778e-05,
+ "loss": 0.9612,
+ "step": 361
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991770485350002e-05,
+ "loss": 0.9569,
+ "step": 362
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991690504326597e-05,
+ "loss": 0.9991,
+ "step": 363
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9916101381420285e-05,
+ "loss": 0.9678,
+ "step": 364
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9915293868275083e-05,
+ "loss": 0.9505,
+ "step": 365
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9914482504143996e-05,
+ "loss": 0.9855,
+ "step": 366
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9913667289342147e-05,
+ "loss": 0.9686,
+ "step": 367
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991284822418616e-05,
+ "loss": 0.9202,
+ "step": 368
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9912025308994146e-05,
+ "loss": 0.9758,
+ "step": 369
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9911198544085723e-05,
+ "loss": 1.0149,
+ "step": 370
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991036792978199e-05,
+ "loss": 1.0112,
+ "step": 371
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990953346640555e-05,
+ "loss": 0.9875,
+ "step": 372
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9908695154280496e-05,
+ "loss": 0.9437,
+ "step": 373
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9907852993732425e-05,
+ "loss": 0.9477,
+ "step": 374
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990700698508842e-05,
+ "loss": 0.9369,
+ "step": 375
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990615712867706e-05,
+ "loss": 0.9131,
+ "step": 376
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9905303424828418e-05,
+ "loss": 1.022,
+ "step": 377
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9904445873874068e-05,
+ "loss": 0.9213,
+ "step": 378
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9903584476147066e-05,
+ "loss": 0.9923,
+ "step": 379
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9902719231981975e-05,
+ "loss": 0.9728,
+ "step": 380
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9901850141714843e-05,
+ "loss": 0.9763,
+ "step": 381
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900977205683213e-05,
+ "loss": 0.9372,
+ "step": 382
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900100424226124e-05,
+ "loss": 0.9181,
+ "step": 383
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9899219797684113e-05,
+ "loss": 0.9654,
+ "step": 384
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.98983353263992e-05,
+ "loss": 0.9802,
+ "step": 385
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9897447010714905e-05,
+ "loss": 0.9552,
+ "step": 386
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.989655485097624e-05,
+ "loss": 0.9995,
+ "step": 387
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.989565884752971e-05,
+ "loss": 0.9242,
+ "step": 388
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9894759000723308e-05,
+ "loss": 1.0011,
+ "step": 389
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9893855310906526e-05,
+ "loss": 0.9923,
+ "step": 390
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9892947778430352e-05,
+ "loss": 0.9929,
+ "step": 391
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9892036403647256e-05,
+ "loss": 1.0172,
+ "step": 392
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9891121186911207e-05,
+ "loss": 0.9918,
+ "step": 393
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9890202128577664e-05,
+ "loss": 0.9967,
+ "step": 394
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988927922900358e-05,
+ "loss": 0.9102,
+ "step": 395
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9888352488547397e-05,
+ "loss": 0.9903,
+ "step": 396
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988742190756905e-05,
+ "loss": 0.9354,
+ "step": 397
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9886487486429966e-05,
+ "loss": 1.0028,
+ "step": 398
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9885549225493064e-05,
+ "loss": 0.9658,
+ "step": 399
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9884607125122753e-05,
+ "loss": 0.94,
+ "step": 400
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988366118568494e-05,
+ "loss": 0.9509,
+ "step": 401
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988271140754701e-05,
+ "loss": 0.9642,
+ "step": 402
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9881757791077848e-05,
+ "loss": 0.9849,
+ "step": 403
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9880800336647825e-05,
+ "loss": 0.9676,
+ "step": 404
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987983904462881e-05,
+ "loss": 0.9746,
+ "step": 405
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9878873915394154e-05,
+ "loss": 1.0209,
+ "step": 406
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9877904949318704e-05,
+ "loss": 0.9741,
+ "step": 407
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9876932146778796e-05,
+ "loss": 0.9492,
+ "step": 408
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9875955508152254e-05,
+ "loss": 0.2791,
+ "step": 409
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987497503381839e-05,
+ "loss": 0.2944,
+ "step": 410
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9873990724158014e-05,
+ "loss": 0.9199,
+ "step": 411
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987300257955342e-05,
+ "loss": 0.96,
+ "step": 412
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987201060038839e-05,
+ "loss": 0.9848,
+ "step": 413
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9871014787048197e-05,
+ "loss": 0.985,
+ "step": 414
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9870015139919606e-05,
+ "loss": 0.9003,
+ "step": 415
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9869011659390866e-05,
+ "loss": 1.0227,
+ "step": 416
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9868004345851716e-05,
+ "loss": 0.9831,
+ "step": 417
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9866993199693393e-05,
+ "loss": 0.9358,
+ "step": 418
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.98659782213086e-05,
+ "loss": 0.9757,
+ "step": 419
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986495941109156e-05,
+ "loss": 1.0239,
+ "step": 420
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9863936769437956e-05,
+ "loss": 0.9802,
+ "step": 421
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986291029674497e-05,
+ "loss": 0.9725,
+ "step": 422
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986187999341128e-05,
+ "loss": 1.008,
+ "step": 423
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9860845859837034e-05,
+ "loss": 0.9516,
+ "step": 424
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985980789642388e-05,
+ "loss": 0.8976,
+ "step": 425
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985876610357496e-05,
+ "loss": 0.9699,
+ "step": 426
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9857720481694887e-05,
+ "loss": 0.9561,
+ "step": 427
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9856671031189765e-05,
+ "loss": 0.9651,
+ "step": 428
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.98556177524672e-05,
+ "loss": 0.9456,
+ "step": 429
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9854560645936262e-05,
+ "loss": 1.0112,
+ "step": 430
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9853499712007523e-05,
+ "loss": 0.9635,
+ "step": 431
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9852434951093035e-05,
+ "loss": 0.8902,
+ "step": 432
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985136636360635e-05,
+ "loss": 0.9033,
+ "step": 433
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985029394996248e-05,
+ "loss": 0.2969,
+ "step": 434
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9849217710577945e-05,
+ "loss": 0.3144,
+ "step": 435
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9848137645870745e-05,
+ "loss": 0.9346,
+ "step": 436
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9847053756260363e-05,
+ "loss": 0.9726,
+ "step": 437
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.984596604216777e-05,
+ "loss": 0.9481,
+ "step": 438
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.984487450401542e-05,
+ "loss": 0.9606,
+ "step": 439
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9843779142227258e-05,
+ "loss": 0.8909,
+ "step": 440
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9842679957228706e-05,
+ "loss": 1.0256,
+ "step": 441
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9841576949446675e-05,
+ "loss": 0.9613,
+ "step": 442
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.984047011930956e-05,
+ "loss": 0.9351,
+ "step": 443
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9839359467247243e-05,
+ "loss": 0.9766,
+ "step": 444
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.983824499369109e-05,
+ "loss": 1.0027,
+ "step": 445
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9837126699073948e-05,
+ "loss": 0.9637,
+ "step": 446
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9836004583830146e-05,
+ "loss": 0.9507,
+ "step": 447
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9834878648395507e-05,
+ "loss": 0.9815,
+ "step": 448
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9833748893207326e-05,
+ "loss": 0.9587,
+ "step": 449
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9832615318704388e-05,
+ "loss": 0.8957,
+ "step": 450
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9831477925326962e-05,
+ "loss": 1.0338,
+ "step": 451
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.98303367135168e-05,
+ "loss": 0.9901,
+ "step": 452
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9829191683717133e-05,
+ "loss": 0.9134,
+ "step": 453
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9828042836372677e-05,
+ "loss": 0.9366,
+ "step": 454
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9826890171929634e-05,
+ "loss": 0.9063,
+ "step": 455
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.982573369083568e-05,
+ "loss": 0.936,
+ "step": 456
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9824573393539984e-05,
+ "loss": 0.9738,
+ "step": 457
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.982340928049319e-05,
+ "loss": 0.9805,
+ "step": 458
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9822241352147426e-05,
+ "loss": 0.9481,
+ "step": 459
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9821069608956307e-05,
+ "loss": 0.9224,
+ "step": 460
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9819894051374917e-05,
+ "loss": 0.9435,
+ "step": 461
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981871467985983e-05,
+ "loss": 0.9558,
+ "step": 462
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9817531494869105e-05,
+ "loss": 0.9753,
+ "step": 463
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9816344496862272e-05,
+ "loss": 0.9506,
+ "step": 464
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9815153686300352e-05,
+ "loss": 0.9922,
+ "step": 465
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981395906364584e-05,
+ "loss": 1.007,
+ "step": 466
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9812760629362714e-05,
+ "loss": 0.9239,
+ "step": 467
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981155838391643e-05,
+ "loss": 0.8999,
+ "step": 468
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9810352327773935e-05,
+ "loss": 0.8966,
+ "step": 469
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9809142461403635e-05,
+ "loss": 0.8609,
+ "step": 470
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9807928785275433e-05,
+ "loss": 1.0235,
+ "step": 471
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.980671129986071e-05,
+ "loss": 0.9631,
+ "step": 472
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9805490005632323e-05,
+ "loss": 1.0053,
+ "step": 473
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.98042649030646e-05,
+ "loss": 0.9878,
+ "step": 474
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9803035992633366e-05,
+ "loss": 1.0371,
+ "step": 475
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9801803274815915e-05,
+ "loss": 1.0088,
+ "step": 476
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9800566750091018e-05,
+ "loss": 0.9889,
+ "step": 477
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9799326418938924e-05,
+ "loss": 0.9152,
+ "step": 478
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979808228184137e-05,
+ "loss": 0.9684,
+ "step": 479
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9796834339281557e-05,
+ "loss": 0.9171,
+ "step": 480
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979558259174418e-05,
+ "loss": 0.9473,
+ "step": 481
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9794327039715395e-05,
+ "loss": 0.9039,
+ "step": 482
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979306768368285e-05,
+ "loss": 0.9673,
+ "step": 483
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9791804524135663e-05,
+ "loss": 0.9844,
+ "step": 484
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979053756156443e-05,
+ "loss": 0.9177,
+ "step": 485
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9789266796461222e-05,
+ "loss": 0.9703,
+ "step": 486
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9787992229319594e-05,
+ "loss": 0.9167,
+ "step": 487
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.978671386063457e-05,
+ "loss": 0.9837,
+ "step": 488
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.978543169090265e-05,
+ "loss": 0.3561,
+ "step": 489
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9784145720621827e-05,
+ "loss": 0.9968,
+ "step": 490
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9782855950291542e-05,
+ "loss": 0.963,
+ "step": 491
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.978156238041274e-05,
+ "loss": 0.9685,
+ "step": 492
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9780265011487822e-05,
+ "loss": 0.9381,
+ "step": 493
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9778963844020668e-05,
+ "loss": 0.9407,
+ "step": 494
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977765887851664e-05,
+ "loss": 0.9165,
+ "step": 495
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977635011548257e-05,
+ "loss": 0.3316,
+ "step": 496
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9775037555426772e-05,
+ "loss": 0.9159,
+ "step": 497
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9773721198859024e-05,
+ "loss": 0.9379,
+ "step": 498
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9772401046290584e-05,
+ "loss": 0.9768,
+ "step": 499
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9771077098234187e-05,
+ "loss": 0.9927,
+ "step": 500
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9769749355204034e-05,
+ "loss": 0.9546,
+ "step": 501
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976841781771581e-05,
+ "loss": 0.9958,
+ "step": 502
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9767082486286667e-05,
+ "loss": 0.9466,
+ "step": 503
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9765743361435234e-05,
+ "loss": 0.9159,
+ "step": 504
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9764400443681607e-05,
+ "loss": 0.8767,
+ "step": 505
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9763053733547367e-05,
+ "loss": 1.0446,
+ "step": 506
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976170323155555e-05,
+ "loss": 0.9448,
+ "step": 507
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976034893823069e-05,
+ "loss": 0.9132,
+ "step": 508
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975899085409876e-05,
+ "loss": 0.9193,
+ "step": 509
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9757628979687247e-05,
+ "loss": 0.9665,
+ "step": 510
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975626331552507e-05,
+ "loss": 0.9742,
+ "step": 511
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9754893862142643e-05,
+ "loss": 0.294,
+ "step": 512
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9753520620071846e-05,
+ "loss": 1.0007,
+ "step": 513
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9752143589846027e-05,
+ "loss": 0.9719,
+ "step": 514
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9750762772000014e-05,
+ "loss": 0.9857,
+ "step": 515
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9749378167070097e-05,
+ "loss": 0.9214,
+ "step": 516
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9747989775594044e-05,
+ "loss": 0.9272,
+ "step": 517
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974659759811109e-05,
+ "loss": 0.9527,
+ "step": 518
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9745201635161938e-05,
+ "loss": 0.9597,
+ "step": 519
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9743801887288762e-05,
+ "loss": 0.9602,
+ "step": 520
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9742398355035212e-05,
+ "loss": 0.9721,
+ "step": 521
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9740991038946404e-05,
+ "loss": 0.3203,
+ "step": 522
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.973957993956892e-05,
+ "loss": 0.9899,
+ "step": 523
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9738165057450817e-05,
+ "loss": 0.9541,
+ "step": 524
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9736746393141617e-05,
+ "loss": 0.9986,
+ "step": 525
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9735323947192317e-05,
+ "loss": 1.0268,
+ "step": 526
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9733897720155377e-05,
+ "loss": 0.9229,
+ "step": 527
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9732467712584723e-05,
+ "loss": 0.9152,
+ "step": 528
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.973103392503576e-05,
+ "loss": 1.0084,
+ "step": 529
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9729596358065347e-05,
+ "loss": 0.9658,
+ "step": 530
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9728155012231825e-05,
+ "loss": 0.9228,
+ "step": 531
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9726709888094994e-05,
+ "loss": 0.9909,
+ "step": 532
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.972526098621612e-05,
+ "loss": 0.955,
+ "step": 533
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.972380830715795e-05,
+ "loss": 0.9968,
+ "step": 534
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9722351851484677e-05,
+ "loss": 0.9466,
+ "step": 535
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9720891619761974e-05,
+ "loss": 0.9519,
+ "step": 536
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9719427612556982e-05,
+ "loss": 1.0199,
+ "step": 537
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9717959830438302e-05,
+ "loss": 0.9054,
+ "step": 538
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9716488273976006e-05,
+ "loss": 0.9618,
+ "step": 539
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971501294374162e-05,
+ "loss": 0.9405,
+ "step": 540
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971353384030816e-05,
+ "loss": 0.9531,
+ "step": 541
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9712050964250083e-05,
+ "loss": 0.9163,
+ "step": 542
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9710564316143323e-05,
+ "loss": 0.9231,
+ "step": 543
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9709073896565276e-05,
+ "loss": 0.9702,
+ "step": 544
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9707579706094807e-05,
+ "loss": 0.9434,
+ "step": 545
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.970608174531224e-05,
+ "loss": 0.9116,
+ "step": 546
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.970458001479937e-05,
+ "loss": 0.9492,
+ "step": 547
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9703074515139445e-05,
+ "loss": 0.951,
+ "step": 548
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9701565246917184e-05,
+ "loss": 0.968,
+ "step": 549
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9700052210718775e-05,
+ "loss": 0.9962,
+ "step": 550
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.969853540713186e-05,
+ "loss": 1.0122,
+ "step": 551
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9697014836745552e-05,
+ "loss": 0.9703,
+ "step": 552
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9695490500150418e-05,
+ "loss": 0.9328,
+ "step": 553
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9693962397938495e-05,
+ "loss": 0.97,
+ "step": 554
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9692430530703282e-05,
+ "loss": 0.9872,
+ "step": 555
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9690894899039735e-05,
+ "loss": 1.015,
+ "step": 556
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9689355503544277e-05,
+ "loss": 0.903,
+ "step": 557
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968781234481479e-05,
+ "loss": 0.9144,
+ "step": 558
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9686265423450624e-05,
+ "loss": 0.9404,
+ "step": 559
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9684714740052584e-05,
+ "loss": 0.9781,
+ "step": 560
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9683160295222934e-05,
+ "loss": 0.9543,
+ "step": 561
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9681602089565403e-05,
+ "loss": 0.9393,
+ "step": 562
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968004012368518e-05,
+ "loss": 0.9467,
+ "step": 563
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967847439818892e-05,
+ "loss": 0.8951,
+ "step": 564
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9676904913684725e-05,
+ "loss": 0.9328,
+ "step": 565
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967533167078217e-05,
+ "loss": 0.9344,
+ "step": 566
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9673754670092283e-05,
+ "loss": 0.979,
+ "step": 567
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9672173912227556e-05,
+ "loss": 0.9351,
+ "step": 568
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967058939780193e-05,
+ "loss": 0.9756,
+ "step": 569
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.966900112743082e-05,
+ "loss": 0.886,
+ "step": 570
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.966740910173108e-05,
+ "loss": 0.9123,
+ "step": 571
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9665813321321054e-05,
+ "loss": 0.958,
+ "step": 572
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9664213786820502e-05,
+ "loss": 0.956,
+ "step": 573
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9662610498850684e-05,
+ "loss": 0.928,
+ "step": 574
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9661003458034288e-05,
+ "loss": 0.9454,
+ "step": 575
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965939266499547e-05,
+ "loss": 0.3032,
+ "step": 576
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9657778120359848e-05,
+ "loss": 0.356,
+ "step": 577
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965615982475449e-05,
+ "loss": 0.9396,
+ "step": 578
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9654537778807924e-05,
+ "loss": 0.9366,
+ "step": 579
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9652911983150135e-05,
+ "loss": 0.9995,
+ "step": 580
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965128243841256e-05,
+ "loss": 0.9299,
+ "step": 581
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.96496491452281e-05,
+ "loss": 1.0017,
+ "step": 582
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9648012104231106e-05,
+ "loss": 0.9755,
+ "step": 583
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964637131605738e-05,
+ "loss": 0.9781,
+ "step": 584
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9644726781344197e-05,
+ "loss": 0.9331,
+ "step": 585
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964307850073026e-05,
+ "loss": 0.9641,
+ "step": 586
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964142647485576e-05,
+ "loss": 0.948,
+ "step": 587
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9639770704362305e-05,
+ "loss": 0.9493,
+ "step": 588
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9638111189892994e-05,
+ "loss": 0.9352,
+ "step": 589
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9636447932092354e-05,
+ "loss": 0.9591,
+ "step": 590
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.963478093160638e-05,
+ "loss": 0.359,
+ "step": 591
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9633110189082515e-05,
+ "loss": 0.94,
+ "step": 592
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.963143570516965e-05,
+ "loss": 0.9336,
+ "step": 593
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9629757480518144e-05,
+ "loss": 0.9057,
+ "step": 594
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9628075515779796e-05,
+ "loss": 0.9284,
+ "step": 595
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.962638981160786e-05,
+ "loss": 0.9744,
+ "step": 596
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9624700368657045e-05,
+ "loss": 0.9535,
+ "step": 597
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9623007187583518e-05,
+ "loss": 0.9348,
+ "step": 598
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.962131026904488e-05,
+ "loss": 0.9052,
+ "step": 599
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.96196096137002e-05,
+ "loss": 0.981,
+ "step": 600
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9617905222209998e-05,
+ "loss": 0.9034,
+ "step": 601
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961619709523623e-05,
+ "loss": 0.9294,
+ "step": 602
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9614485233442316e-05,
+ "loss": 0.9432,
+ "step": 603
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961276963749313e-05,
+ "loss": 0.9437,
+ "step": 604
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9611050308054982e-05,
+ "loss": 0.9222,
+ "step": 605
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9609327245795642e-05,
+ "loss": 0.9645,
+ "step": 606
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9607600451384327e-05,
+ "loss": 0.9591,
+ "step": 607
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.960586992549171e-05,
+ "loss": 0.3233,
+ "step": 608
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9604135668789897e-05,
+ "loss": 0.3091,
+ "step": 609
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9602397681952462e-05,
+ "loss": 0.9029,
+ "step": 610
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9600655965654413e-05,
+ "loss": 0.9137,
+ "step": 611
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959891052057222e-05,
+ "loss": 0.9258,
+ "step": 612
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9597161347383783e-05,
+ "loss": 1.0029,
+ "step": 613
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959540844676847e-05,
+ "loss": 0.9326,
+ "step": 614
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9593651819407084e-05,
+ "loss": 0.9146,
+ "step": 615
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959189146598188e-05,
+ "loss": 0.9942,
+ "step": 616
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9590127387176556e-05,
+ "loss": 0.9462,
+ "step": 617
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9588359583676263e-05,
+ "loss": 0.9417,
+ "step": 618
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9586588056167595e-05,
+ "loss": 0.9543,
+ "step": 619
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958481280533859e-05,
+ "loss": 0.9091,
+ "step": 620
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958303383187874e-05,
+ "loss": 0.9614,
+ "step": 621
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9581251136478974e-05,
+ "loss": 0.966,
+ "step": 622
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9579464719831668e-05,
+ "loss": 1.0124,
+ "step": 623
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9577674582630653e-05,
+ "loss": 0.9958,
+ "step": 624
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957588072557119e-05,
+ "loss": 0.9447,
+ "step": 625
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957408314935e-05,
+ "loss": 0.8778,
+ "step": 626
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9572281854665233e-05,
+ "loss": 0.9647,
+ "step": 627
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95704768422165e-05,
+ "loss": 0.9164,
+ "step": 628
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956866811270484e-05,
+ "loss": 0.9681,
+ "step": 629
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9566855666832743e-05,
+ "loss": 0.9696,
+ "step": 630
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9565039505304145e-05,
+ "loss": 0.9038,
+ "step": 631
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956321962882442e-05,
+ "loss": 0.9858,
+ "step": 632
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956139603810039e-05,
+ "loss": 0.9405,
+ "step": 633
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9559568733840317e-05,
+ "loss": 0.9368,
+ "step": 634
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9557737716753898e-05,
+ "loss": 0.9261,
+ "step": 635
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9555902987552283e-05,
+ "loss": 0.952,
+ "step": 636
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9554064546948064e-05,
+ "loss": 0.9369,
+ "step": 637
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9552222395655262e-05,
+ "loss": 0.8745,
+ "step": 638
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9550376534389355e-05,
+ "loss": 0.9598,
+ "step": 639
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9548526963867253e-05,
+ "loss": 0.985,
+ "step": 640
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9546673684807303e-05,
+ "loss": 0.9148,
+ "step": 641
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95448166979293e-05,
+ "loss": 0.9259,
+ "step": 642
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9542956003954477e-05,
+ "loss": 0.9543,
+ "step": 643
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9541091603605508e-05,
+ "loss": 0.8677,
+ "step": 644
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95392234976065e-05,
+ "loss": 0.9009,
+ "step": 645
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9537351686683003e-05,
+ "loss": 0.9323,
+ "step": 646
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9535476171562014e-05,
+ "loss": 0.9528,
+ "step": 647
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9533596952971955e-05,
+ "loss": 0.9111,
+ "step": 648
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9531714031642698e-05,
+ "loss": 0.8794,
+ "step": 649
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9529827408305542e-05,
+ "loss": 0.9228,
+ "step": 650
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9527937083693233e-05,
+ "loss": 0.978,
+ "step": 651
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.952604305853995e-05,
+ "loss": 0.9506,
+ "step": 652
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9524145333581315e-05,
+ "loss": 0.994,
+ "step": 653
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9522243909554375e-05,
+ "loss": 0.969,
+ "step": 654
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.952033878719763e-05,
+ "loss": 1.0084,
+ "step": 655
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9518429967251e-05,
+ "loss": 0.3728,
+ "step": 656
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9516517450455853e-05,
+ "loss": 0.9258,
+ "step": 657
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.951460123755499e-05,
+ "loss": 0.8782,
+ "step": 658
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9512681329292635e-05,
+ "loss": 0.9232,
+ "step": 659
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.951075772641447e-05,
+ "loss": 0.9109,
+ "step": 660
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.95088304296676e-05,
+ "loss": 0.9142,
+ "step": 661
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.950689943980056e-05,
+ "loss": 0.9372,
+ "step": 662
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9504964757563322e-05,
+ "loss": 0.9459,
+ "step": 663
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.95030263837073e-05,
+ "loss": 0.9116,
+ "step": 664
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9501084318985335e-05,
+ "loss": 0.9795,
+ "step": 665
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.94991385641517e-05,
+ "loss": 0.9757,
+ "step": 666
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9497189119962105e-05,
+ "loss": 0.987,
+ "step": 667
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9495235987173693e-05,
+ "loss": 0.8944,
+ "step": 668
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.949327916654504e-05,
+ "loss": 0.985,
+ "step": 669
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.949131865883614e-05,
+ "loss": 0.8853,
+ "step": 670
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948935446480845e-05,
+ "loss": 0.91,
+ "step": 671
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948738658522483e-05,
+ "loss": 0.9634,
+ "step": 672
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9485415020849583e-05,
+ "loss": 0.358,
+ "step": 673
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9483439772448444e-05,
+ "loss": 0.8701,
+ "step": 674
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9481460840788573e-05,
+ "loss": 0.8917,
+ "step": 675
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9479478226638565e-05,
+ "loss": 0.3685,
+ "step": 676
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.947749193076845e-05,
+ "loss": 0.9397,
+ "step": 677
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9475501953949674e-05,
+ "loss": 0.9149,
+ "step": 678
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9473508296955126e-05,
+ "loss": 0.3384,
+ "step": 679
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9471510960559122e-05,
+ "loss": 0.9735,
+ "step": 680
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9469509945537395e-05,
+ "loss": 1.0004,
+ "step": 681
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9467505252667126e-05,
+ "loss": 0.9774,
+ "step": 682
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9465496882726913e-05,
+ "loss": 0.9733,
+ "step": 683
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.946348483649678e-05,
+ "loss": 0.9641,
+ "step": 684
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9461469114758184e-05,
+ "loss": 0.9397,
+ "step": 685
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9459449718294008e-05,
+ "loss": 0.9411,
+ "step": 686
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.945742664788856e-05,
+ "loss": 0.9532,
+ "step": 687
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9455399904327585e-05,
+ "loss": 0.9781,
+ "step": 688
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.945336948839824e-05,
+ "loss": 0.9334,
+ "step": 689
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9451335400889114e-05,
+ "loss": 0.9683,
+ "step": 690
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.944929764259023e-05,
+ "loss": 0.3255,
+ "step": 691
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9447256214293026e-05,
+ "loss": 0.9136,
+ "step": 692
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9445211116790365e-05,
+ "loss": 0.9113,
+ "step": 693
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9443162350876544e-05,
+ "loss": 0.9494,
+ "step": 694
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.944110991734728e-05,
+ "loss": 0.8912,
+ "step": 695
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9439053816999717e-05,
+ "loss": 0.9126,
+ "step": 696
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9436994050632415e-05,
+ "loss": 0.932,
+ "step": 697
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9434930619045367e-05,
+ "loss": 0.9146,
+ "step": 698
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9432863523039986e-05,
+ "loss": 0.9433,
+ "step": 699
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9430792763419105e-05,
+ "loss": 0.949,
+ "step": 700
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.942871834098699e-05,
+ "loss": 0.9449,
+ "step": 701
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9426640256549313e-05,
+ "loss": 0.3057,
+ "step": 702
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9424558510913186e-05,
+ "loss": 0.968,
+ "step": 703
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9422473104887133e-05,
+ "loss": 0.9604,
+ "step": 704
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9420384039281103e-05,
+ "loss": 0.9836,
+ "step": 705
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.941829131490646e-05,
+ "loss": 0.9014,
+ "step": 706
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9416194932576e-05,
+ "loss": 0.9281,
+ "step": 707
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.941409489310393e-05,
+ "loss": 0.8974,
+ "step": 708
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9411991197305878e-05,
+ "loss": 0.9138,
+ "step": 709
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9409883845998905e-05,
+ "loss": 0.955,
+ "step": 710
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9407772840001473e-05,
+ "loss": 0.9367,
+ "step": 711
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9405658180133477e-05,
+ "loss": 0.9613,
+ "step": 712
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9403539867216226e-05,
+ "loss": 0.9882,
+ "step": 713
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9401417902072447e-05,
+ "loss": 0.9232,
+ "step": 714
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9399292285526286e-05,
+ "loss": 1.0081,
+ "step": 715
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939716301840331e-05,
+ "loss": 0.9325,
+ "step": 716
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9395030101530504e-05,
+ "loss": 0.969,
+ "step": 717
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939289353573626e-05,
+ "loss": 0.9948,
+ "step": 718
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9390753321850404e-05,
+ "loss": 0.9315,
+ "step": 719
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.938860946070417e-05,
+ "loss": 0.9175,
+ "step": 720
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.93864619531302e-05,
+ "loss": 0.9635,
+ "step": 721
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9384310799962575e-05,
+ "loss": 0.9699,
+ "step": 722
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9382156002036764e-05,
+ "loss": 0.8872,
+ "step": 723
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9379997560189677e-05,
+ "loss": 0.9445,
+ "step": 724
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937783547525962e-05,
+ "loss": 0.9454,
+ "step": 725
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9375669748086326e-05,
+ "loss": 0.9394,
+ "step": 726
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937350037951094e-05,
+ "loss": 0.9521,
+ "step": 727
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9371327370376018e-05,
+ "loss": 0.9497,
+ "step": 728
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936915072152553e-05,
+ "loss": 0.912,
+ "step": 729
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936697043380486e-05,
+ "loss": 0.3029,
+ "step": 730
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936478650806081e-05,
+ "loss": 0.9439,
+ "step": 731
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936259894514159e-05,
+ "loss": 0.9478,
+ "step": 732
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9360407745896828e-05,
+ "loss": 0.9617,
+ "step": 733
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9358212911177556e-05,
+ "loss": 0.8989,
+ "step": 734
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935601444183622e-05,
+ "loss": 0.9565,
+ "step": 735
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935381233872669e-05,
+ "loss": 0.8481,
+ "step": 736
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935160660270423e-05,
+ "loss": 0.9559,
+ "step": 737
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934939723462552e-05,
+ "loss": 0.907,
+ "step": 738
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9347184235348663e-05,
+ "loss": 0.9452,
+ "step": 739
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9344967605733154e-05,
+ "loss": 0.8931,
+ "step": 740
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934274734663991e-05,
+ "loss": 0.9234,
+ "step": 741
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934052345893125e-05,
+ "loss": 0.9326,
+ "step": 742
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9338295943470915e-05,
+ "loss": 0.9554,
+ "step": 743
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9336064801124034e-05,
+ "loss": 0.9385,
+ "step": 744
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933383003275717e-05,
+ "loss": 0.9185,
+ "step": 745
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933159163923827e-05,
+ "loss": 0.947,
+ "step": 746
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9329349621436708e-05,
+ "loss": 0.9201,
+ "step": 747
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9327103980223255e-05,
+ "loss": 0.3265,
+ "step": 748
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.932485471647009e-05,
+ "loss": 0.3056,
+ "step": 749
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9322601831050804e-05,
+ "loss": 0.9428,
+ "step": 750
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9320345324840396e-05,
+ "loss": 0.9848,
+ "step": 751
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9318085198715257e-05,
+ "loss": 0.9482,
+ "step": 752
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.93158214535532e-05,
+ "loss": 0.9088,
+ "step": 753
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9313554090233436e-05,
+ "loss": 0.9325,
+ "step": 754
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9311283109636586e-05,
+ "loss": 0.9473,
+ "step": 755
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9309008512644668e-05,
+ "loss": 0.9608,
+ "step": 756
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.930673030014111e-05,
+ "loss": 1.0158,
+ "step": 757
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.930444847301075e-05,
+ "loss": 0.953,
+ "step": 758
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9302163032139813e-05,
+ "loss": 0.9279,
+ "step": 759
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9299873978415947e-05,
+ "loss": 0.9526,
+ "step": 760
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9297581312728187e-05,
+ "loss": 0.9076,
+ "step": 761
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929528503596698e-05,
+ "loss": 0.9154,
+ "step": 762
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929298514902418e-05,
+ "loss": 0.9768,
+ "step": 763
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929068165279303e-05,
+ "loss": 0.9425,
+ "step": 764
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928837454816818e-05,
+ "loss": 0.9292,
+ "step": 765
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9286063836045687e-05,
+ "loss": 0.9183,
+ "step": 766
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9283749517323e-05,
+ "loss": 0.9553,
+ "step": 767
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928143159289898e-05,
+ "loss": 0.9285,
+ "step": 768
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927911006367388e-05,
+ "loss": 0.8953,
+ "step": 769
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927678493054935e-05,
+ "loss": 1.01,
+ "step": 770
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9274456194428454e-05,
+ "loss": 0.8866,
+ "step": 771
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9272123856215643e-05,
+ "loss": 0.948,
+ "step": 772
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9269787916816764e-05,
+ "loss": 0.9182,
+ "step": 773
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9267448377139074e-05,
+ "loss": 0.9536,
+ "step": 774
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9265105238091227e-05,
+ "loss": 0.9415,
+ "step": 775
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9262758500583265e-05,
+ "loss": 0.8991,
+ "step": 776
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9260408165526638e-05,
+ "loss": 0.9534,
+ "step": 777
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9258054233834184e-05,
+ "loss": 0.8794,
+ "step": 778
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9255696706420147e-05,
+ "loss": 0.9673,
+ "step": 779
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9253335584200165e-05,
+ "loss": 0.8788,
+ "step": 780
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9250970868091268e-05,
+ "loss": 0.9376,
+ "step": 781
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.924860255901188e-05,
+ "loss": 0.8811,
+ "step": 782
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9246230657881834e-05,
+ "loss": 0.9476,
+ "step": 783
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9243855165622345e-05,
+ "loss": 0.9362,
+ "step": 784
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9241476083156026e-05,
+ "loss": 0.9155,
+ "step": 785
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9239093411406885e-05,
+ "loss": 0.9678,
+ "step": 786
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9236707151300326e-05,
+ "loss": 0.8807,
+ "step": 787
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9234317303763145e-05,
+ "loss": 0.8831,
+ "step": 788
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9231923869723528e-05,
+ "loss": 0.3529,
+ "step": 789
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.922952685011106e-05,
+ "loss": 0.964,
+ "step": 790
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9227126245856716e-05,
+ "loss": 0.3712,
+ "step": 791
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.922472205789286e-05,
+ "loss": 0.9838,
+ "step": 792
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9222314287153255e-05,
+ "loss": 0.299,
+ "step": 793
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9219902934573048e-05,
+ "loss": 0.9794,
+ "step": 794
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9217488001088784e-05,
+ "loss": 0.9706,
+ "step": 795
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9215069487638396e-05,
+ "loss": 0.991,
+ "step": 796
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.92126473951612e-05,
+ "loss": 0.9366,
+ "step": 797
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.921022172459791e-05,
+ "loss": 0.9889,
+ "step": 798
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.920779247689064e-05,
+ "loss": 0.9183,
+ "step": 799
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9205359652982868e-05,
+ "loss": 0.9689,
+ "step": 800
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9202923253819482e-05,
+ "loss": 0.9095,
+ "step": 801
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.920048328034675e-05,
+ "loss": 0.8812,
+ "step": 802
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9198039733512326e-05,
+ "loss": 0.9137,
+ "step": 803
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9195592614265262e-05,
+ "loss": 0.975,
+ "step": 804
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9193141923555984e-05,
+ "loss": 0.8885,
+ "step": 805
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.919068766233632e-05,
+ "loss": 0.854,
+ "step": 806
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9188229831559468e-05,
+ "loss": 0.9822,
+ "step": 807
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9185768432180026e-05,
+ "loss": 0.912,
+ "step": 808
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9183303465153972e-05,
+ "loss": 0.9205,
+ "step": 809
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9180834931438673e-05,
+ "loss": 0.9676,
+ "step": 810
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917836283199288e-05,
+ "loss": 0.8845,
+ "step": 811
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917588716777672e-05,
+ "loss": 0.9723,
+ "step": 812
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917340793975172e-05,
+ "loss": 0.9122,
+ "step": 813
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917092514888078e-05,
+ "loss": 0.9583,
+ "step": 814
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9168438796128193e-05,
+ "loss": 0.9079,
+ "step": 815
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9165948882459623e-05,
+ "loss": 0.8845,
+ "step": 816
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9163455408842123e-05,
+ "loss": 0.9921,
+ "step": 817
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9160958376244138e-05,
+ "loss": 0.9166,
+ "step": 818
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9158457785635478e-05,
+ "loss": 0.9785,
+ "step": 819
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915595363798735e-05,
+ "loss": 0.8986,
+ "step": 820
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915344593427233e-05,
+ "loss": 0.9226,
+ "step": 821
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9150934675464384e-05,
+ "loss": 0.8712,
+ "step": 822
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9148419862538858e-05,
+ "loss": 0.9654,
+ "step": 823
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9145901496472474e-05,
+ "loss": 0.9771,
+ "step": 824
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9143379578243335e-05,
+ "loss": 0.9436,
+ "step": 825
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.914085410883093e-05,
+ "loss": 0.9894,
+ "step": 826
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9138325089216118e-05,
+ "loss": 0.9068,
+ "step": 827
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.913579252038114e-05,
+ "loss": 0.9422,
+ "step": 828
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9133256403309627e-05,
+ "loss": 0.9182,
+ "step": 829
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.913071673898656e-05,
+ "loss": 0.9261,
+ "step": 830
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.912817352839833e-05,
+ "loss": 0.8802,
+ "step": 831
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9125626772532683e-05,
+ "loss": 0.877,
+ "step": 832
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9123076472378753e-05,
+ "loss": 0.9579,
+ "step": 833
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9120522628927047e-05,
+ "loss": 0.9898,
+ "step": 834
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9117965243169444e-05,
+ "loss": 0.9051,
+ "step": 835
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9115404316099212e-05,
+ "loss": 0.9402,
+ "step": 836
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9112839848710978e-05,
+ "loss": 0.9451,
+ "step": 837
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9110271842000755e-05,
+ "loss": 0.3687,
+ "step": 838
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9107700296965926e-05,
+ "loss": 0.9534,
+ "step": 839
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.910512521460525e-05,
+ "loss": 0.9271,
+ "step": 840
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9102546595918857e-05,
+ "loss": 1.0075,
+ "step": 841
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9099964441908258e-05,
+ "loss": 0.9131,
+ "step": 842
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9097378753576327e-05,
+ "loss": 0.9214,
+ "step": 843
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9094789531927315e-05,
+ "loss": 0.9203,
+ "step": 844
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.909219677796685e-05,
+ "loss": 0.9698,
+ "step": 845
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9089600492701926e-05,
+ "loss": 0.345,
+ "step": 846
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.908700067714091e-05,
+ "loss": 0.3491,
+ "step": 847
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9084397332293537e-05,
+ "loss": 0.9524,
+ "step": 848
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9081790459170926e-05,
+ "loss": 0.9527,
+ "step": 849
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9079180058785547e-05,
+ "loss": 0.9158,
+ "step": 850
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9076566132151255e-05,
+ "loss": 0.9576,
+ "step": 851
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.907394868028326e-05,
+ "loss": 0.9538,
+ "step": 852
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9071327704198163e-05,
+ "loss": 0.9251,
+ "step": 853
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.906870320491391e-05,
+ "loss": 0.8861,
+ "step": 854
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9066075183449835e-05,
+ "loss": 0.9199,
+ "step": 855
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9063443640826624e-05,
+ "loss": 0.8862,
+ "step": 856
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.906080857806634e-05,
+ "loss": 0.9411,
+ "step": 857
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.905816999619242e-05,
+ "loss": 0.955,
+ "step": 858
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9055527896229642e-05,
+ "loss": 0.9152,
+ "step": 859
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.905288227920418e-05,
+ "loss": 0.8776,
+ "step": 860
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9050233146143554e-05,
+ "loss": 0.9462,
+ "step": 861
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9047580498076663e-05,
+ "loss": 1.0074,
+ "step": 862
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904492433603376e-05,
+ "loss": 0.8786,
+ "step": 863
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904226466104647e-05,
+ "loss": 0.9435,
+ "step": 864
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.903960147414778e-05,
+ "loss": 0.9668,
+ "step": 865
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.903693477637204e-05,
+ "loss": 0.3139,
+ "step": 866
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9034264568754967e-05,
+ "loss": 0.9452,
+ "step": 867
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9031590852333637e-05,
+ "loss": 0.9357,
+ "step": 868
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9028913628146487e-05,
+ "loss": 0.9148,
+ "step": 869
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.902623289723333e-05,
+ "loss": 0.9018,
+ "step": 870
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.902354866063532e-05,
+ "loss": 0.9095,
+ "step": 871
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9020860919394992e-05,
+ "loss": 0.8821,
+ "step": 872
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9018169674556228e-05,
+ "loss": 0.8456,
+ "step": 873
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.901547492716428e-05,
+ "loss": 0.9265,
+ "step": 874
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9012776678265756e-05,
+ "loss": 0.9095,
+ "step": 875
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9010074928908624e-05,
+ "loss": 0.9256,
+ "step": 876
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900736968014221e-05,
+ "loss": 0.9216,
+ "step": 877
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9004660933017208e-05,
+ "loss": 0.9195,
+ "step": 878
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900194868858566e-05,
+ "loss": 0.9252,
+ "step": 879
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8999232947900968e-05,
+ "loss": 0.916,
+ "step": 880
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89965137120179e-05,
+ "loss": 0.3312,
+ "step": 881
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.899379098199257e-05,
+ "loss": 0.8992,
+ "step": 882
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.899106475888246e-05,
+ "loss": 0.9512,
+ "step": 883
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89883350437464e-05,
+ "loss": 0.9483,
+ "step": 884
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8985601837644586e-05,
+ "loss": 0.954,
+ "step": 885
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8982865141638557e-05,
+ "loss": 0.9777,
+ "step": 886
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8980124956791216e-05,
+ "loss": 0.9181,
+ "step": 887
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8977381284166818e-05,
+ "loss": 0.9654,
+ "step": 888
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897463412483098e-05,
+ "loss": 0.8833,
+ "step": 889
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897188347985066e-05,
+ "loss": 0.9304,
+ "step": 890
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896912935029418e-05,
+ "loss": 0.9227,
+ "step": 891
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896637173723121e-05,
+ "loss": 0.9524,
+ "step": 892
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8963610641732777e-05,
+ "loss": 0.9938,
+ "step": 893
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8960846064871257e-05,
+ "loss": 0.8756,
+ "step": 894
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8958078007720387e-05,
+ "loss": 0.8807,
+ "step": 895
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.895530647135524e-05,
+ "loss": 0.9026,
+ "step": 896
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8952531456852248e-05,
+ "loss": 0.3359,
+ "step": 897
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8949752965289197e-05,
+ "loss": 0.8913,
+ "step": 898
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.894697099774523e-05,
+ "loss": 0.8688,
+ "step": 899
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.894418555530082e-05,
+ "loss": 0.9398,
+ "step": 900
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89413966390378e-05,
+ "loss": 0.9413,
+ "step": 901
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8938604250039362e-05,
+ "loss": 0.9731,
+ "step": 902
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8935808389390032e-05,
+ "loss": 0.9106,
+ "step": 903
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.893300905817569e-05,
+ "loss": 0.8899,
+ "step": 904
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8930206257483566e-05,
+ "loss": 0.983,
+ "step": 905
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8927399988402233e-05,
+ "loss": 0.9512,
+ "step": 906
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8924590252021614e-05,
+ "loss": 0.9165,
+ "step": 907
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8921777049432985e-05,
+ "loss": 0.35,
+ "step": 908
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8918960381728947e-05,
+ "loss": 0.9625,
+ "step": 909
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8916140250003475e-05,
+ "loss": 0.905,
+ "step": 910
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.891331665535187e-05,
+ "loss": 0.9542,
+ "step": 911
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8910489598870784e-05,
+ "loss": 0.9589,
+ "step": 912
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8907659081658214e-05,
+ "loss": 0.9409,
+ "step": 913
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8904825104813497e-05,
+ "loss": 0.89,
+ "step": 914
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8901987669437322e-05,
+ "loss": 0.944,
+ "step": 915
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.889914677663171e-05,
+ "loss": 0.9217,
+ "step": 916
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8896302427500042e-05,
+ "loss": 0.8912,
+ "step": 917
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8893454623147017e-05,
+ "loss": 0.9592,
+ "step": 918
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.88906033646787e-05,
+ "loss": 0.9194,
+ "step": 919
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8887748653202478e-05,
+ "loss": 0.9415,
+ "step": 920
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8884890489827097e-05,
+ "loss": 0.8378,
+ "step": 921
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8882028875662627e-05,
+ "loss": 0.8577,
+ "step": 922
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8879163811820493e-05,
+ "loss": 0.9159,
+ "step": 923
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8876295299413445e-05,
+ "loss": 0.8698,
+ "step": 924
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8873423339555584e-05,
+ "loss": 0.9418,
+ "step": 925
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8870547933362352e-05,
+ "loss": 0.3612,
+ "step": 926
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.886766908195051e-05,
+ "loss": 0.9417,
+ "step": 927
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8864786786438187e-05,
+ "loss": 0.9079,
+ "step": 928
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.886190104794482e-05,
+ "loss": 0.9634,
+ "step": 929
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8859011867591203e-05,
+ "loss": 0.9203,
+ "step": 930
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885611924649946e-05,
+ "loss": 0.9226,
+ "step": 931
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885322318579305e-05,
+ "loss": 0.9712,
+ "step": 932
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8850323686596766e-05,
+ "loss": 0.9656,
+ "step": 933
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8847420750036748e-05,
+ "loss": 0.8569,
+ "step": 934
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.884451437724046e-05,
+ "loss": 0.9105,
+ "step": 935
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8841604569336702e-05,
+ "loss": 0.9168,
+ "step": 936
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.883869132745561e-05,
+ "loss": 0.8854,
+ "step": 937
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.883577465272866e-05,
+ "loss": 0.8706,
+ "step": 938
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8832854546288642e-05,
+ "loss": 0.9097,
+ "step": 939
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8829931009269707e-05,
+ "loss": 0.9096,
+ "step": 940
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882700404280731e-05,
+ "loss": 0.932,
+ "step": 941
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8824073648038258e-05,
+ "loss": 0.9086,
+ "step": 942
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882113982610068e-05,
+ "loss": 0.9184,
+ "step": 943
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.881820257813404e-05,
+ "loss": 0.9778,
+ "step": 944
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8815261905279133e-05,
+ "loss": 0.9168,
+ "step": 945
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8812317808678075e-05,
+ "loss": 0.3528,
+ "step": 946
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8809370289474327e-05,
+ "loss": 0.9731,
+ "step": 947
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8806419348812673e-05,
+ "loss": 0.9179,
+ "step": 948
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8803464987839217e-05,
+ "loss": 0.9582,
+ "step": 949
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.88005072077014e-05,
+ "loss": 0.947,
+ "step": 950
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8797546009547996e-05,
+ "loss": 0.958,
+ "step": 951
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.879458139452909e-05,
+ "loss": 0.9568,
+ "step": 952
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8791613363796118e-05,
+ "loss": 0.9292,
+ "step": 953
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8788641918501817e-05,
+ "loss": 0.8576,
+ "step": 954
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8785667059800264e-05,
+ "loss": 0.8156,
+ "step": 955
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8782688788846865e-05,
+ "loss": 0.8837,
+ "step": 956
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877970710679834e-05,
+ "loss": 0.9345,
+ "step": 957
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877672201481275e-05,
+ "loss": 0.8941,
+ "step": 958
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877373351404946e-05,
+ "loss": 0.9011,
+ "step": 959
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8770741605669173e-05,
+ "loss": 0.9253,
+ "step": 960
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.876774629083391e-05,
+ "loss": 0.9121,
+ "step": 961
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8764747570707017e-05,
+ "loss": 0.937,
+ "step": 962
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8761745446453167e-05,
+ "loss": 0.9532,
+ "step": 963
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.875873991923835e-05,
+ "loss": 0.9758,
+ "step": 964
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.875573099022987e-05,
+ "loss": 0.9477,
+ "step": 965
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8752718660596367e-05,
+ "loss": 0.9285,
+ "step": 966
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8749702931507797e-05,
+ "loss": 0.904,
+ "step": 967
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.874668380413543e-05,
+ "loss": 0.8815,
+ "step": 968
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8743661279651856e-05,
+ "loss": 0.9679,
+ "step": 969
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8740635359231e-05,
+ "loss": 0.9621,
+ "step": 970
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8737606044048086e-05,
+ "loss": 0.8696,
+ "step": 971
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.873457333527967e-05,
+ "loss": 0.9741,
+ "step": 972
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.873153723410362e-05,
+ "loss": 0.3411,
+ "step": 973
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8728497741699115e-05,
+ "loss": 0.9085,
+ "step": 974
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.872545485924667e-05,
+ "loss": 0.9056,
+ "step": 975
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8722408587928104e-05,
+ "loss": 1.0055,
+ "step": 976
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8719358928926546e-05,
+ "loss": 0.9415,
+ "step": 977
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8716305883426456e-05,
+ "loss": 0.8918,
+ "step": 978
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.87132494526136e-05,
+ "loss": 0.8949,
+ "step": 979
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8710189637675055e-05,
+ "loss": 0.8764,
+ "step": 980
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8707126439799225e-05,
+ "loss": 0.8984,
+ "step": 981
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.870405986017582e-05,
+ "loss": 0.8995,
+ "step": 982
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8700989899995857e-05,
+ "loss": 0.9052,
+ "step": 983
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8697916560451682e-05,
+ "loss": 0.923,
+ "step": 984
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.869483984273694e-05,
+ "loss": 0.8883,
+ "step": 985
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8691759748046594e-05,
+ "loss": 0.952,
+ "step": 986
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8688676277576916e-05,
+ "loss": 0.971,
+ "step": 987
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.868558943252549e-05,
+ "loss": 0.908,
+ "step": 988
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.868249921409122e-05,
+ "loss": 0.9385,
+ "step": 989
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8679405623474294e-05,
+ "loss": 0.9478,
+ "step": 990
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8676308661876242e-05,
+ "loss": 0.9882,
+ "step": 991
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8673208330499884e-05,
+ "loss": 0.898,
+ "step": 992
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8670104630549348e-05,
+ "loss": 0.9524,
+ "step": 993
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866699756323008e-05,
+ "loss": 0.8181,
+ "step": 994
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866388712974883e-05,
+ "loss": 0.9904,
+ "step": 995
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866077333131365e-05,
+ "loss": 0.8949,
+ "step": 996
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8657656169133908e-05,
+ "loss": 0.9498,
+ "step": 997
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8654535644420276e-05,
+ "loss": 0.8722,
+ "step": 998
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8651411758384718e-05,
+ "loss": 0.8998,
+ "step": 999
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8648284512240527e-05,
+ "loss": 0.8935,
+ "step": 1000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8645153907202285e-05,
+ "loss": 0.8978,
+ "step": 1001
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8642019944485884e-05,
+ "loss": 0.331,
+ "step": 1002
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.863888262530852e-05,
+ "loss": 0.9529,
+ "step": 1003
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.863574195088869e-05,
+ "loss": 0.8517,
+ "step": 1004
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8632597922446195e-05,
+ "loss": 0.9371,
+ "step": 1005
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8629450541202142e-05,
+ "loss": 0.971,
+ "step": 1006
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8626299808378933e-05,
+ "loss": 0.9409,
+ "step": 1007
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.862314572520028e-05,
+ "loss": 0.9195,
+ "step": 1008
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.861998829289119e-05,
+ "loss": 1.0161,
+ "step": 1009
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.861682751267798e-05,
+ "loss": 0.8846,
+ "step": 1010
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.861366338578825e-05,
+ "loss": 0.9276,
+ "step": 1011
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8610495913450922e-05,
+ "loss": 0.9159,
+ "step": 1012
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8607325096896197e-05,
+ "loss": 0.3851,
+ "step": 1013
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8604150937355588e-05,
+ "loss": 0.8811,
+ "step": 1014
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.86009734360619e-05,
+ "loss": 0.9636,
+ "step": 1015
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8597792594249237e-05,
+ "loss": 0.9326,
+ "step": 1016
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8594608413153e-05,
+ "loss": 0.9532,
+ "step": 1017
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8591420894009897e-05,
+ "loss": 0.9115,
+ "step": 1018
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8588230038057913e-05,
+ "loss": 0.9345,
+ "step": 1019
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8585035846536347e-05,
+ "loss": 0.9732,
+ "step": 1020
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8581838320685782e-05,
+ "loss": 0.96,
+ "step": 1021
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8578637461748105e-05,
+ "loss": 0.946,
+ "step": 1022
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.857543327096649e-05,
+ "loss": 0.9461,
+ "step": 1023
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.85722257495854e-05,
+ "loss": 0.9272,
+ "step": 1024
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.856901489885061e-05,
+ "loss": 0.9934,
+ "step": 1025
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.856580072000918e-05,
+ "loss": 0.8986,
+ "step": 1026
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8562583214309447e-05,
+ "loss": 0.9382,
+ "step": 1027
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.855936238300106e-05,
+ "loss": 0.9913,
+ "step": 1028
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8556138227334957e-05,
+ "loss": 0.9356,
+ "step": 1029
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.855291074856336e-05,
+ "loss": 0.9394,
+ "step": 1030
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8549679947939778e-05,
+ "loss": 0.9326,
+ "step": 1031
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8546445826719023e-05,
+ "loss": 0.9758,
+ "step": 1032
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8543208386157195e-05,
+ "loss": 0.9908,
+ "step": 1033
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.853996762751167e-05,
+ "loss": 0.9939,
+ "step": 1034
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8536723552041124e-05,
+ "loss": 0.9334,
+ "step": 1035
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.853347616100552e-05,
+ "loss": 0.8662,
+ "step": 1036
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8530225455666103e-05,
+ "loss": 0.874,
+ "step": 1037
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8526971437285416e-05,
+ "loss": 0.9607,
+ "step": 1038
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8523714107127278e-05,
+ "loss": 0.9436,
+ "step": 1039
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8520453466456797e-05,
+ "loss": 0.9564,
+ "step": 1040
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8517189516540376e-05,
+ "loss": 0.9919,
+ "step": 1041
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8513922258645687e-05,
+ "loss": 0.9457,
+ "step": 1042
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8510651694041702e-05,
+ "loss": 0.9113,
+ "step": 1043
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8507377823998664e-05,
+ "loss": 0.889,
+ "step": 1044
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.850410064978811e-05,
+ "loss": 0.8957,
+ "step": 1045
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8500820172682858e-05,
+ "loss": 0.9713,
+ "step": 1046
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8497536393957005e-05,
+ "loss": 0.3168,
+ "step": 1047
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8494249314885932e-05,
+ "loss": 0.8657,
+ "step": 1048
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8490958936746304e-05,
+ "loss": 0.9021,
+ "step": 1049
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.848766526081607e-05,
+ "loss": 0.8838,
+ "step": 1050
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8484368288374452e-05,
+ "loss": 0.9475,
+ "step": 1051
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8481068020701954e-05,
+ "loss": 0.9845,
+ "step": 1052
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8477764459080364e-05,
+ "loss": 0.8957,
+ "step": 1053
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8474457604792746e-05,
+ "loss": 0.3215,
+ "step": 1054
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8471147459123447e-05,
+ "loss": 0.9442,
+ "step": 1055
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8467834023358088e-05,
+ "loss": 0.8866,
+ "step": 1056
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.846451729878357e-05,
+ "loss": 0.9572,
+ "step": 1057
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.846119728668807e-05,
+ "loss": 0.9419,
+ "step": 1058
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.845787398836104e-05,
+ "loss": 0.9387,
+ "step": 1059
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8454547405093212e-05,
+ "loss": 0.9377,
+ "step": 1060
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8451217538176597e-05,
+ "loss": 0.9553,
+ "step": 1061
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.844788438890447e-05,
+ "loss": 0.986,
+ "step": 1062
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8444547958571396e-05,
+ "loss": 0.3361,
+ "step": 1063
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.84412082484732e-05,
+ "loss": 0.3385,
+ "step": 1064
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8437865259906987e-05,
+ "loss": 0.9415,
+ "step": 1065
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8434518994171136e-05,
+ "loss": 0.9397,
+ "step": 1066
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.84311694525653e-05,
+ "loss": 0.9054,
+ "step": 1067
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.84278166363904e-05,
+ "loss": 0.8955,
+ "step": 1068
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8424460546948632e-05,
+ "loss": 0.9017,
+ "step": 1069
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8421101185543463e-05,
+ "loss": 0.92,
+ "step": 1070
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841773855347963e-05,
+ "loss": 0.9055,
+ "step": 1071
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841437265206314e-05,
+ "loss": 0.8364,
+ "step": 1072
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841100348260127e-05,
+ "loss": 0.9515,
+ "step": 1073
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.840763104640257e-05,
+ "loss": 0.9436,
+ "step": 1074
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8404255344776853e-05,
+ "loss": 0.9395,
+ "step": 1075
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.84008763790352e-05,
+ "loss": 0.8926,
+ "step": 1076
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8397494150489963e-05,
+ "loss": 0.9672,
+ "step": 1077
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8394108660454766e-05,
+ "loss": 0.865,
+ "step": 1078
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8390719910244487e-05,
+ "loss": 0.9504,
+ "step": 1079
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8387327901175286e-05,
+ "loss": 0.941,
+ "step": 1080
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838393263456457e-05,
+ "loss": 0.9799,
+ "step": 1081
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838053411173103e-05,
+ "loss": 0.9256,
+ "step": 1082
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8377132333994606e-05,
+ "loss": 0.9193,
+ "step": 1083
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.837372730267652e-05,
+ "loss": 0.8726,
+ "step": 1084
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8370319019099236e-05,
+ "loss": 0.9096,
+ "step": 1085
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8366907484586497e-05,
+ "loss": 0.9407,
+ "step": 1086
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.83634927004633e-05,
+ "loss": 0.9167,
+ "step": 1087
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8360074668055915e-05,
+ "loss": 0.9128,
+ "step": 1088
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8356653388691857e-05,
+ "loss": 0.8422,
+ "step": 1089
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8353228863699922e-05,
+ "loss": 0.908,
+ "step": 1090
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8349801094410148e-05,
+ "loss": 0.8724,
+ "step": 1091
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8346370082153843e-05,
+ "loss": 0.9003,
+ "step": 1092
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8342935828263574e-05,
+ "loss": 0.98,
+ "step": 1093
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8339498334073166e-05,
+ "loss": 0.8614,
+ "step": 1094
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.83360576009177e-05,
+ "loss": 0.911,
+ "step": 1095
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.833261363013352e-05,
+ "loss": 0.8732,
+ "step": 1096
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.832916642305822e-05,
+ "loss": 0.8753,
+ "step": 1097
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.832571598103066e-05,
+ "loss": 0.9483,
+ "step": 1098
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8322262305390948e-05,
+ "loss": 0.974,
+ "step": 1099
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8318805397480455e-05,
+ "loss": 0.9167,
+ "step": 1100
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8315345258641802e-05,
+ "loss": 0.9712,
+ "step": 1101
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8311881890218873e-05,
+ "loss": 0.9197,
+ "step": 1102
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.830841529355679e-05,
+ "loss": 0.9173,
+ "step": 1103
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8304945470001948e-05,
+ "loss": 0.9408,
+ "step": 1104
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8301472420901985e-05,
+ "loss": 0.9391,
+ "step": 1105
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8297996147605787e-05,
+ "loss": 0.9482,
+ "step": 1106
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.829451665146351e-05,
+ "loss": 0.8962,
+ "step": 1107
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8291033933826535e-05,
+ "loss": 0.9761,
+ "step": 1108
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8287547996047523e-05,
+ "loss": 0.933,
+ "step": 1109
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8284058839480363e-05,
+ "loss": 0.8762,
+ "step": 1110
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8280566465480206e-05,
+ "loss": 0.9217,
+ "step": 1111
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8277070875403455e-05,
+ "loss": 0.3614,
+ "step": 1112
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8273572070607756e-05,
+ "loss": 0.9359,
+ "step": 1113
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8270070052451995e-05,
+ "loss": 0.9401,
+ "step": 1114
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8266564822296323e-05,
+ "loss": 0.9186,
+ "step": 1115
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.826305638150213e-05,
+ "loss": 0.321,
+ "step": 1116
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.825954473143205e-05,
+ "loss": 0.3378,
+ "step": 1117
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8256029873449976e-05,
+ "loss": 0.8492,
+ "step": 1118
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.825251180892103e-05,
+ "loss": 0.3327,
+ "step": 1119
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8248990539211596e-05,
+ "loss": 0.8799,
+ "step": 1120
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8245466065689282e-05,
+ "loss": 0.9734,
+ "step": 1121
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.824193838972297e-05,
+ "loss": 0.9092,
+ "step": 1122
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.823840751268275e-05,
+ "loss": 0.8317,
+ "step": 1123
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8234873435939987e-05,
+ "loss": 0.8746,
+ "step": 1124
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8231336160867275e-05,
+ "loss": 0.9396,
+ "step": 1125
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8227795688838446e-05,
+ "loss": 0.9045,
+ "step": 1126
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.822425202122858e-05,
+ "loss": 0.9036,
+ "step": 1127
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8220705159413996e-05,
+ "loss": 0.8528,
+ "step": 1128
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8217155104772256e-05,
+ "loss": 0.9213,
+ "step": 1129
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8213601858682158e-05,
+ "loss": 0.4092,
+ "step": 1130
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8210045422523744e-05,
+ "loss": 0.9155,
+ "step": 1131
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8206485797678294e-05,
+ "loss": 0.3397,
+ "step": 1132
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.820292298552832e-05,
+ "loss": 0.9601,
+ "step": 1133
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.819935698745759e-05,
+ "loss": 0.8789,
+ "step": 1134
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8195787804851076e-05,
+ "loss": 0.9189,
+ "step": 1135
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8192215439095025e-05,
+ "loss": 0.8967,
+ "step": 1136
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8188639891576893e-05,
+ "loss": 0.8964,
+ "step": 1137
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8185061163685386e-05,
+ "loss": 0.8626,
+ "step": 1138
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.818147925681044e-05,
+ "loss": 0.9607,
+ "step": 1139
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8177894172343227e-05,
+ "loss": 0.9404,
+ "step": 1140
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.817430591167615e-05,
+ "loss": 0.9238,
+ "step": 1141
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8170714476202848e-05,
+ "loss": 0.9255,
+ "step": 1142
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8167119867318197e-05,
+ "loss": 0.3475,
+ "step": 1143
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.81635220864183e-05,
+ "loss": 0.8938,
+ "step": 1144
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8159921134900486e-05,
+ "loss": 0.9342,
+ "step": 1145
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8156317014163337e-05,
+ "loss": 0.9245,
+ "step": 1146
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8152709725606642e-05,
+ "loss": 0.9467,
+ "step": 1147
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8149099270631434e-05,
+ "loss": 0.8735,
+ "step": 1148
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8145485650639973e-05,
+ "loss": 0.3534,
+ "step": 1149
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8141868867035745e-05,
+ "loss": 0.9545,
+ "step": 1150
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8138248921223465e-05,
+ "loss": 0.8612,
+ "step": 1151
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8134625814609084e-05,
+ "loss": 0.3531,
+ "step": 1152
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8130999548599767e-05,
+ "loss": 0.9884,
+ "step": 1153
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8127370124603927e-05,
+ "loss": 0.938,
+ "step": 1154
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8123737544031178e-05,
+ "loss": 0.9063,
+ "step": 1155
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8120101808292373e-05,
+ "loss": 0.9163,
+ "step": 1156
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.81164629187996e-05,
+ "loss": 0.9941,
+ "step": 1157
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811282087696615e-05,
+ "loss": 0.8835,
+ "step": 1158
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8109175684206558e-05,
+ "loss": 0.8915,
+ "step": 1159
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8105527341936574e-05,
+ "loss": 0.9839,
+ "step": 1160
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.810187585157317e-05,
+ "loss": 0.3224,
+ "step": 1161
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8098221214534543e-05,
+ "loss": 0.307,
+ "step": 1162
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8094563432240107e-05,
+ "loss": 0.9391,
+ "step": 1163
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8090902506110513e-05,
+ "loss": 0.9469,
+ "step": 1164
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8087238437567614e-05,
+ "loss": 0.9498,
+ "step": 1165
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8083571228034498e-05,
+ "loss": 0.912,
+ "step": 1166
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.807990087893546e-05,
+ "loss": 0.8633,
+ "step": 1167
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.807622739169603e-05,
+ "loss": 0.952,
+ "step": 1168
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.807255076774294e-05,
+ "loss": 0.3537,
+ "step": 1169
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8068871008504153e-05,
+ "loss": 0.9312,
+ "step": 1170
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8065188115408844e-05,
+ "loss": 1.0083,
+ "step": 1171
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8061502089887406e-05,
+ "loss": 0.9158,
+ "step": 1172
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.805781293337145e-05,
+ "loss": 0.8719,
+ "step": 1173
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8054120647293798e-05,
+ "loss": 0.8885,
+ "step": 1174
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8050425233088496e-05,
+ "loss": 0.3342,
+ "step": 1175
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.80467266921908e-05,
+ "loss": 0.8902,
+ "step": 1176
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8043025026037178e-05,
+ "loss": 0.8875,
+ "step": 1177
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8039320236065314e-05,
+ "loss": 0.9133,
+ "step": 1178
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.803561232371411e-05,
+ "loss": 0.9502,
+ "step": 1179
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.803190129042367e-05,
+ "loss": 0.9052,
+ "step": 1180
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8028187137635325e-05,
+ "loss": 0.9564,
+ "step": 1181
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8024469866791602e-05,
+ "loss": 0.9611,
+ "step": 1182
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.802074947933625e-05,
+ "loss": 0.9002,
+ "step": 1183
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.801702597671422e-05,
+ "loss": 0.3337,
+ "step": 1184
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8013299360371685e-05,
+ "loss": 0.8692,
+ "step": 1185
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8009569631756013e-05,
+ "loss": 0.9799,
+ "step": 1186
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8005836792315793e-05,
+ "loss": 0.8318,
+ "step": 1187
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.800210084350081e-05,
+ "loss": 0.9676,
+ "step": 1188
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.799836178676207e-05,
+ "loss": 0.9258,
+ "step": 1189
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.799461962355178e-05,
+ "loss": 0.9595,
+ "step": 1190
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7990874355323345e-05,
+ "loss": 0.9038,
+ "step": 1191
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7987125983531393e-05,
+ "loss": 0.8838,
+ "step": 1192
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7983374509631742e-05,
+ "loss": 0.8658,
+ "step": 1193
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7979619935081424e-05,
+ "loss": 0.9302,
+ "step": 1194
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.797586226133867e-05,
+ "loss": 0.8896,
+ "step": 1195
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7972101489862924e-05,
+ "loss": 0.9137,
+ "step": 1196
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7968337622114824e-05,
+ "loss": 0.8774,
+ "step": 1197
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7964570659556206e-05,
+ "loss": 0.9242,
+ "step": 1198
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.796080060365012e-05,
+ "loss": 0.9346,
+ "step": 1199
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7957027455860815e-05,
+ "loss": 0.944,
+ "step": 1200
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.795325121765373e-05,
+ "loss": 0.8704,
+ "step": 1201
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.794947189049552e-05,
+ "loss": 0.3457,
+ "step": 1202
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7945689475854033e-05,
+ "loss": 0.9214,
+ "step": 1203
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7941903975198305e-05,
+ "loss": 0.8942,
+ "step": 1204
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7938115389998595e-05,
+ "loss": 0.931,
+ "step": 1205
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7934323721726334e-05,
+ "loss": 0.9481,
+ "step": 1206
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7930528971854166e-05,
+ "loss": 0.951,
+ "step": 1207
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.792673114185593e-05,
+ "loss": 0.9125,
+ "step": 1208
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7922930233206656e-05,
+ "loss": 0.937,
+ "step": 1209
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7919126247382576e-05,
+ "loss": 0.9068,
+ "step": 1210
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.791531918586112e-05,
+ "loss": 0.896,
+ "step": 1211
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7911509050120892e-05,
+ "loss": 0.97,
+ "step": 1212
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7907695841641716e-05,
+ "loss": 0.892,
+ "step": 1213
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7903879561904597e-05,
+ "loss": 0.9074,
+ "step": 1214
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.790006021239173e-05,
+ "loss": 0.9188,
+ "step": 1215
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.789623779458651e-05,
+ "loss": 0.3391,
+ "step": 1216
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.789241230997352e-05,
+ "loss": 1.015,
+ "step": 1217
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7888583760038534e-05,
+ "loss": 0.8729,
+ "step": 1218
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7884752146268513e-05,
+ "loss": 0.8719,
+ "step": 1219
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7880917470151614e-05,
+ "loss": 0.9325,
+ "step": 1220
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7877079733177185e-05,
+ "loss": 0.9018,
+ "step": 1221
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7873238936835754e-05,
+ "loss": 0.8804,
+ "step": 1222
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786939508261904e-05,
+ "loss": 0.9016,
+ "step": 1223
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786554817201996e-05,
+ "loss": 0.9105,
+ "step": 1224
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.78616982065326e-05,
+ "loss": 0.3369,
+ "step": 1225
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.785784518765225e-05,
+ "loss": 0.8996,
+ "step": 1226
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7853989116875373e-05,
+ "loss": 0.3376,
+ "step": 1227
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7850129995699626e-05,
+ "loss": 0.8681,
+ "step": 1228
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7846267825623843e-05,
+ "loss": 0.9937,
+ "step": 1229
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7842402608148053e-05,
+ "loss": 0.9595,
+ "step": 1230
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7838534344773453e-05,
+ "loss": 0.8766,
+ "step": 1231
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7834663037002444e-05,
+ "loss": 0.921,
+ "step": 1232
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7830788686338586e-05,
+ "loss": 0.9094,
+ "step": 1233
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7826911294286636e-05,
+ "loss": 0.8882,
+ "step": 1234
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.782303086235253e-05,
+ "loss": 0.9083,
+ "step": 1235
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.781914739204338e-05,
+ "loss": 0.7964,
+ "step": 1236
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7815260884867486e-05,
+ "loss": 0.9136,
+ "step": 1237
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.781137134233432e-05,
+ "loss": 0.9764,
+ "step": 1238
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7807478765954532e-05,
+ "loss": 0.9434,
+ "step": 1239
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7803583157239958e-05,
+ "loss": 0.9205,
+ "step": 1240
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7799684517703605e-05,
+ "loss": 0.8635,
+ "step": 1241
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.779578284885966e-05,
+ "loss": 0.3322,
+ "step": 1242
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.779187815222349e-05,
+ "loss": 0.8767,
+ "step": 1243
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.778797042931163e-05,
+ "loss": 0.902,
+ "step": 1244
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7784059681641798e-05,
+ "loss": 0.9385,
+ "step": 1245
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.778014591073288e-05,
+ "loss": 0.934,
+ "step": 1246
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.777622911810494e-05,
+ "loss": 0.8906,
+ "step": 1247
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.777230930527922e-05,
+ "loss": 0.8875,
+ "step": 1248
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7768386473778124e-05,
+ "loss": 0.9189,
+ "step": 1249
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7764460625125236e-05,
+ "loss": 0.8831,
+ "step": 1250
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.776053176084531e-05,
+ "loss": 0.9489,
+ "step": 1251
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7756599882464274e-05,
+ "loss": 0.9754,
+ "step": 1252
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7752664991509224e-05,
+ "loss": 0.8612,
+ "step": 1253
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7748727089508423e-05,
+ "loss": 0.8817,
+ "step": 1254
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7744786177991307e-05,
+ "loss": 0.3096,
+ "step": 1255
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.774084225848849e-05,
+ "loss": 0.8883,
+ "step": 1256
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.773689533253173e-05,
+ "loss": 0.8886,
+ "step": 1257
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7732945401653978e-05,
+ "loss": 0.8891,
+ "step": 1258
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7728992467389342e-05,
+ "loss": 0.9199,
+ "step": 1259
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7725036531273087e-05,
+ "loss": 0.9217,
+ "step": 1260
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7721077594841663e-05,
+ "loss": 0.9471,
+ "step": 1261
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.771711565963267e-05,
+ "loss": 0.8964,
+ "step": 1262
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7713150727184878e-05,
+ "loss": 0.9179,
+ "step": 1263
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.770918279903822e-05,
+ "loss": 0.8938,
+ "step": 1264
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.77052118767338e-05,
+ "loss": 0.8972,
+ "step": 1265
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7701237961813874e-05,
+ "loss": 0.9037,
+ "step": 1266
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7697261055821864e-05,
+ "loss": 0.9953,
+ "step": 1267
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7693281160302354e-05,
+ "loss": 0.916,
+ "step": 1268
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7689298276801095e-05,
+ "loss": 0.8463,
+ "step": 1269
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7685312406864986e-05,
+ "loss": 0.9613,
+ "step": 1270
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7681323552042094e-05,
+ "loss": 0.34,
+ "step": 1271
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.767733171388165e-05,
+ "loss": 0.9194,
+ "step": 1272
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7673336893934033e-05,
+ "loss": 0.8789,
+ "step": 1273
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7669339093750786e-05,
+ "loss": 0.872,
+ "step": 1274
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.766533831488461e-05,
+ "loss": 0.9512,
+ "step": 1275
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7661334558889357e-05,
+ "loss": 0.8791,
+ "step": 1276
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7657327827320046e-05,
+ "loss": 0.8505,
+ "step": 1277
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.765331812173284e-05,
+ "loss": 0.9258,
+ "step": 1278
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7649305443685068e-05,
+ "loss": 0.8811,
+ "step": 1279
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.76452897947352e-05,
+ "loss": 0.8763,
+ "step": 1280
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7641271176442876e-05,
+ "loss": 0.8905,
+ "step": 1281
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7637249590368878e-05,
+ "loss": 0.9268,
+ "step": 1282
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.763322503807514e-05,
+ "loss": 0.9025,
+ "step": 1283
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7629197521124758e-05,
+ "loss": 0.8868,
+ "step": 1284
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7625167041081967e-05,
+ "loss": 0.9169,
+ "step": 1285
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7621133599512163e-05,
+ "loss": 0.8898,
+ "step": 1286
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.761709719798189e-05,
+ "loss": 0.9608,
+ "step": 1287
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.761305783805883e-05,
+ "loss": 0.9333,
+ "step": 1288
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7609015521311836e-05,
+ "loss": 0.3426,
+ "step": 1289
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7604970249310893e-05,
+ "loss": 0.8983,
+ "step": 1290
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7600922023627137e-05,
+ "loss": 0.8992,
+ "step": 1291
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.759687084583285e-05,
+ "loss": 0.927,
+ "step": 1292
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.759281671750147e-05,
+ "loss": 0.9638,
+ "step": 1293
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7588759640207564e-05,
+ "loss": 0.3531,
+ "step": 1294
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7584699615526857e-05,
+ "loss": 0.9061,
+ "step": 1295
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7580636645036224e-05,
+ "loss": 0.9489,
+ "step": 1296
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.757657073031367e-05,
+ "loss": 0.985,
+ "step": 1297
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7572501872938343e-05,
+ "loss": 0.3491,
+ "step": 1298
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.756843007449055e-05,
+ "loss": 0.9456,
+ "step": 1299
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7564355336551727e-05,
+ "loss": 0.8545,
+ "step": 1300
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7560277660704455e-05,
+ "loss": 0.3479,
+ "step": 1301
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.755619704853246e-05,
+ "loss": 0.8946,
+ "step": 1302
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7552113501620595e-05,
+ "loss": 0.8977,
+ "step": 1303
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7548027021554874e-05,
+ "loss": 0.9164,
+ "step": 1304
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.754393760992243e-05,
+ "loss": 0.9089,
+ "step": 1305
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7539845268311548e-05,
+ "loss": 0.9209,
+ "step": 1306
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7535749998311645e-05,
+ "loss": 0.9072,
+ "step": 1307
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.753165180151328e-05,
+ "loss": 0.3462,
+ "step": 1308
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.752755067950814e-05,
+ "loss": 0.9508,
+ "step": 1309
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.752344663388906e-05,
+ "loss": 0.9197,
+ "step": 1310
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7519339666249997e-05,
+ "loss": 0.8936,
+ "step": 1311
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7515229778186052e-05,
+ "loss": 0.897,
+ "step": 1312
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7511116971293463e-05,
+ "loss": 0.8872,
+ "step": 1313
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7507001247169587e-05,
+ "loss": 0.9111,
+ "step": 1314
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7502882607412933e-05,
+ "loss": 0.9244,
+ "step": 1315
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.749876105362313e-05,
+ "loss": 0.9142,
+ "step": 1316
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7494636587400942e-05,
+ "loss": 0.9361,
+ "step": 1317
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.749050921034826e-05,
+ "loss": 0.9259,
+ "step": 1318
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7486378924068123e-05,
+ "loss": 0.9613,
+ "step": 1319
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.748224573016467e-05,
+ "loss": 0.9206,
+ "step": 1320
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7478109630243195e-05,
+ "loss": 0.8995,
+ "step": 1321
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.747397062591011e-05,
+ "loss": 0.8912,
+ "step": 1322
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.746982871877296e-05,
+ "loss": 0.9132,
+ "step": 1323
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7465683910440405e-05,
+ "loss": 0.3367,
+ "step": 1324
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7461536202522248e-05,
+ "loss": 0.8463,
+ "step": 1325
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.745738559662941e-05,
+ "loss": 0.8797,
+ "step": 1326
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7453232094373936e-05,
+ "loss": 0.9016,
+ "step": 1327
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7449075697369005e-05,
+ "loss": 0.9495,
+ "step": 1328
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7444916407228904e-05,
+ "loss": 0.8987,
+ "step": 1329
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.744075422556906e-05,
+ "loss": 0.8992,
+ "step": 1330
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7436589154006014e-05,
+ "loss": 0.991,
+ "step": 1331
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.743242119415743e-05,
+ "loss": 0.8775,
+ "step": 1332
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7428250347642102e-05,
+ "loss": 0.8728,
+ "step": 1333
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7424076616079933e-05,
+ "loss": 0.9342,
+ "step": 1334
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7419900001091953e-05,
+ "loss": 0.9245,
+ "step": 1335
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7415720504300314e-05,
+ "loss": 0.9226,
+ "step": 1336
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.741153812732828e-05,
+ "loss": 0.9372,
+ "step": 1337
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7407352871800246e-05,
+ "loss": 0.8907,
+ "step": 1338
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7403164739341708e-05,
+ "loss": 0.911,
+ "step": 1339
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.739897373157929e-05,
+ "loss": 0.8968,
+ "step": 1340
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7394779850140736e-05,
+ "loss": 0.938,
+ "step": 1341
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7390583096654895e-05,
+ "loss": 0.8963,
+ "step": 1342
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7386383472751745e-05,
+ "loss": 0.9017,
+ "step": 1343
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7382180980062365e-05,
+ "loss": 0.9331,
+ "step": 1344
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7377975620218954e-05,
+ "loss": 0.9107,
+ "step": 1345
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7373767394854836e-05,
+ "loss": 0.9292,
+ "step": 1346
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7369556305604422e-05,
+ "loss": 0.9282,
+ "step": 1347
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.736534235410326e-05,
+ "loss": 0.8801,
+ "step": 1348
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7361125541988e-05,
+ "loss": 0.8528,
+ "step": 1349
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7356905870896407e-05,
+ "loss": 0.9513,
+ "step": 1350
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.735268334246734e-05,
+ "loss": 0.878,
+ "step": 1351
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7348457958340792e-05,
+ "loss": 0.3268,
+ "step": 1352
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7344229720157846e-05,
+ "loss": 0.8879,
+ "step": 1353
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7339998629560705e-05,
+ "loss": 0.9062,
+ "step": 1354
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7335764688192676e-05,
+ "loss": 0.9827,
+ "step": 1355
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.733152789769817e-05,
+ "loss": 1.0078,
+ "step": 1356
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7327288259722714e-05,
+ "loss": 0.9629,
+ "step": 1357
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7323045775912927e-05,
+ "loss": 0.9111,
+ "step": 1358
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7318800447916543e-05,
+ "loss": 0.94,
+ "step": 1359
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7314552277382403e-05,
+ "loss": 0.9217,
+ "step": 1360
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7310301265960446e-05,
+ "loss": 0.8825,
+ "step": 1361
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7306047415301706e-05,
+ "loss": 0.8768,
+ "step": 1362
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7301790727058344e-05,
+ "loss": 0.8997,
+ "step": 1363
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7297531202883598e-05,
+ "loss": 0.9012,
+ "step": 1364
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7293268844431826e-05,
+ "loss": 0.8939,
+ "step": 1365
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7289003653358472e-05,
+ "loss": 0.9494,
+ "step": 1366
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7284735631320093e-05,
+ "loss": 0.8624,
+ "step": 1367
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7280464779974335e-05,
+ "loss": 0.9329,
+ "step": 1368
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7276191100979952e-05,
+ "loss": 0.8958,
+ "step": 1369
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7271914595996784e-05,
+ "loss": 0.8791,
+ "step": 1370
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7267635266685782e-05,
+ "loss": 0.9476,
+ "step": 1371
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7263353114708993e-05,
+ "loss": 0.963,
+ "step": 1372
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7259068141729542e-05,
+ "loss": 0.8908,
+ "step": 1373
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7254780349411677e-05,
+ "loss": 0.3562,
+ "step": 1374
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7250489739420718e-05,
+ "loss": 0.969,
+ "step": 1375
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7246196313423095e-05,
+ "loss": 0.3459,
+ "step": 1376
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7241900073086318e-05,
+ "loss": 0.9044,
+ "step": 1377
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7237601020079003e-05,
+ "loss": 0.8814,
+ "step": 1378
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7233299156070852e-05,
+ "loss": 0.9551,
+ "step": 1379
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7228994482732653e-05,
+ "loss": 0.9468,
+ "step": 1380
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.72246870017363e-05,
+ "loss": 0.9222,
+ "step": 1381
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7220376714754766e-05,
+ "loss": 0.8468,
+ "step": 1382
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7216063623462112e-05,
+ "loss": 0.8935,
+ "step": 1383
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7211747729533504e-05,
+ "loss": 0.9338,
+ "step": 1384
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7207429034645176e-05,
+ "loss": 0.8834,
+ "step": 1385
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.720310754047446e-05,
+ "loss": 0.9381,
+ "step": 1386
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.719878324869978e-05,
+ "loss": 0.896,
+ "step": 1387
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7194456161000634e-05,
+ "loss": 0.9337,
+ "step": 1388
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.719012627905762e-05,
+ "loss": 0.8796,
+ "step": 1389
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.718579360455241e-05,
+ "loss": 0.3521,
+ "step": 1390
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7181458139167767e-05,
+ "loss": 0.8592,
+ "step": 1391
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7177119884587536e-05,
+ "loss": 0.8868,
+ "step": 1392
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.717277884249664e-05,
+ "loss": 0.9154,
+ "step": 1393
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.716843501458109e-05,
+ "loss": 0.3666,
+ "step": 1394
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.716408840252799e-05,
+ "loss": 0.895,
+ "step": 1395
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7159739008025503e-05,
+ "loss": 0.9729,
+ "step": 1396
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7155386832762892e-05,
+ "loss": 0.3205,
+ "step": 1397
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.715103187843048e-05,
+ "loss": 0.945,
+ "step": 1398
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7146674146719688e-05,
+ "loss": 0.9108,
+ "step": 1399
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7142313639323012e-05,
+ "loss": 0.8483,
+ "step": 1400
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7137950357934017e-05,
+ "loss": 0.9048,
+ "step": 1401
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7133584304247354e-05,
+ "loss": 0.8964,
+ "step": 1402
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7129215479958747e-05,
+ "loss": 0.8927,
+ "step": 1403
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7124843886765e-05,
+ "loss": 0.898,
+ "step": 1404
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.712046952636398e-05,
+ "loss": 0.8874,
+ "step": 1405
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7116092400454655e-05,
+ "loss": 0.8855,
+ "step": 1406
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7111712510737035e-05,
+ "loss": 0.8747,
+ "step": 1407
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7107329858912226e-05,
+ "loss": 0.8969,
+ "step": 1408
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7102944446682393e-05,
+ "loss": 0.9312,
+ "step": 1409
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.709855627575079e-05,
+ "loss": 0.9206,
+ "step": 1410
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7094165347821724e-05,
+ "loss": 0.9285,
+ "step": 1411
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7089771664600584e-05,
+ "loss": 0.872,
+ "step": 1412
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.708537522779382e-05,
+ "loss": 0.9077,
+ "step": 1413
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7080976039108964e-05,
+ "loss": 0.3426,
+ "step": 1414
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7076574100254614e-05,
+ "loss": 0.9247,
+ "step": 1415
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.707216941294042e-05,
+ "loss": 0.8963,
+ "step": 1416
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.706776197887712e-05,
+ "loss": 0.8499,
+ "step": 1417
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7063351799776514e-05,
+ "loss": 0.9228,
+ "step": 1418
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7058938877351456e-05,
+ "loss": 0.887,
+ "step": 1419
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.705452321331588e-05,
+ "loss": 0.8649,
+ "step": 1420
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7050104809384774e-05,
+ "loss": 0.302,
+ "step": 1421
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.70456836672742e-05,
+ "loss": 0.876,
+ "step": 1422
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.704125978870128e-05,
+ "loss": 0.8863,
+ "step": 1423
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7036833175384192e-05,
+ "loss": 0.8995,
+ "step": 1424
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7032403829042182e-05,
+ "loss": 0.3592,
+ "step": 1425
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7027971751395563e-05,
+ "loss": 0.3541,
+ "step": 1426
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7023536944165697e-05,
+ "loss": 0.9257,
+ "step": 1427
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7019099409075014e-05,
+ "loss": 0.8985,
+ "step": 1428
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7014659147847005e-05,
+ "loss": 0.9004,
+ "step": 1429
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.701021616220621e-05,
+ "loss": 0.9354,
+ "step": 1430
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7005770453878234e-05,
+ "loss": 0.978,
+ "step": 1431
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7001322024589742e-05,
+ "loss": 0.9114,
+ "step": 1432
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6996870876068455e-05,
+ "loss": 0.9199,
+ "step": 1433
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6992417010043144e-05,
+ "loss": 0.8406,
+ "step": 1434
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6987960428243637e-05,
+ "loss": 0.8679,
+ "step": 1435
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6983501132400825e-05,
+ "loss": 0.9248,
+ "step": 1436
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6979039124246643e-05,
+ "loss": 1.0086,
+ "step": 1437
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6974574405514083e-05,
+ "loss": 0.3541,
+ "step": 1438
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6970106977937192e-05,
+ "loss": 0.9326,
+ "step": 1439
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.696563684325107e-05,
+ "loss": 0.3749,
+ "step": 1440
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6961164003191862e-05,
+ "loss": 0.9212,
+ "step": 1441
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6956688459496767e-05,
+ "loss": 0.9714,
+ "step": 1442
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.695221021390404e-05,
+ "loss": 0.8775,
+ "step": 1443
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6947729268152972e-05,
+ "loss": 0.9413,
+ "step": 1444
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6943245623983918e-05,
+ "loss": 0.97,
+ "step": 1445
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6938759283138268e-05,
+ "loss": 0.8966,
+ "step": 1446
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.693427024735847e-05,
+ "loss": 0.9482,
+ "step": 1447
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.692977851838801e-05,
+ "loss": 0.9045,
+ "step": 1448
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6925284097971427e-05,
+ "loss": 0.9114,
+ "step": 1449
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6920786987854296e-05,
+ "loss": 0.9462,
+ "step": 1450
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.691628718978325e-05,
+ "loss": 0.9369,
+ "step": 1451
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.691178470550596e-05,
+ "loss": 0.9344,
+ "step": 1452
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6907279536771127e-05,
+ "loss": 0.9304,
+ "step": 1453
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6902771685328524e-05,
+ "loss": 0.8722,
+ "step": 1454
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6898261152928933e-05,
+ "loss": 0.9627,
+ "step": 1455
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6893747941324197e-05,
+ "loss": 0.9426,
+ "step": 1456
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6889232052267203e-05,
+ "loss": 0.883,
+ "step": 1457
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.688471348751186e-05,
+ "loss": 0.8001,
+ "step": 1458
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.688019224881313e-05,
+ "loss": 0.9631,
+ "step": 1459
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6875668337927014e-05,
+ "loss": 0.8921,
+ "step": 1460
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6871141756610544e-05,
+ "loss": 0.8152,
+ "step": 1461
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6866612506621788e-05,
+ "loss": 0.8897,
+ "step": 1462
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6862080589719863e-05,
+ "loss": 0.8766,
+ "step": 1463
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6857546007664908e-05,
+ "loss": 0.9122,
+ "step": 1464
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6853008762218103e-05,
+ "loss": 0.9693,
+ "step": 1465
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.684846885514166e-05,
+ "loss": 0.9323,
+ "step": 1466
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6843926288198828e-05,
+ "loss": 0.3472,
+ "step": 1467
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.683938106315389e-05,
+ "loss": 0.8746,
+ "step": 1468
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.683483318177216e-05,
+ "loss": 0.3501,
+ "step": 1469
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6830282645819974e-05,
+ "loss": 0.9011,
+ "step": 1470
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6825729457064718e-05,
+ "loss": 0.9537,
+ "step": 1471
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6821173617274793e-05,
+ "loss": 0.8521,
+ "step": 1472
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6816615128219635e-05,
+ "loss": 0.8628,
+ "step": 1473
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.681205399166971e-05,
+ "loss": 0.876,
+ "step": 1474
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6807490209396506e-05,
+ "loss": 0.8959,
+ "step": 1475
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6802923783172553e-05,
+ "loss": 0.9094,
+ "step": 1476
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.679835471477139e-05,
+ "loss": 0.8905,
+ "step": 1477
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6793783005967593e-05,
+ "loss": 0.9005,
+ "step": 1478
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.678920865853676e-05,
+ "loss": 0.3728,
+ "step": 1479
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.678463167425552e-05,
+ "loss": 0.9065,
+ "step": 1480
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6780052054901512e-05,
+ "loss": 0.8991,
+ "step": 1481
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6775469802253416e-05,
+ "loss": 0.8868,
+ "step": 1482
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6770884918090923e-05,
+ "loss": 0.8659,
+ "step": 1483
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6766297404194745e-05,
+ "loss": 0.9002,
+ "step": 1484
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6761707262346624e-05,
+ "loss": 0.8773,
+ "step": 1485
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.675711449432932e-05,
+ "loss": 0.9197,
+ "step": 1486
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6752519101926606e-05,
+ "loss": 0.8743,
+ "step": 1487
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6747921086923284e-05,
+ "loss": 0.8821,
+ "step": 1488
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.674332045110517e-05,
+ "loss": 0.9071,
+ "step": 1489
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6738717196259092e-05,
+ "loss": 0.9039,
+ "step": 1490
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.673411132417291e-05,
+ "loss": 0.9037,
+ "step": 1491
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.672950283663548e-05,
+ "loss": 0.9522,
+ "step": 1492
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6724891735436697e-05,
+ "loss": 0.9196,
+ "step": 1493
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6720278022367453e-05,
+ "loss": 0.8583,
+ "step": 1494
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6715661699219664e-05,
+ "loss": 0.9125,
+ "step": 1495
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6711042767786257e-05,
+ "loss": 0.8885,
+ "step": 1496
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6706421229861168e-05,
+ "loss": 0.8732,
+ "step": 1497
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6701797087239354e-05,
+ "loss": 0.351,
+ "step": 1498
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6697170341716772e-05,
+ "loss": 0.8356,
+ "step": 1499
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6692540995090403e-05,
+ "loss": 0.9195,
+ "step": 1500
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.668790904915823e-05,
+ "loss": 0.8917,
+ "step": 1501
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6683274505719248e-05,
+ "loss": 0.8966,
+ "step": 1502
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6678637366573455e-05,
+ "loss": 0.9046,
+ "step": 1503
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.667399763352187e-05,
+ "loss": 0.9207,
+ "step": 1504
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.666935530836651e-05,
+ "loss": 0.9575,
+ "step": 1505
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6664710392910396e-05,
+ "loss": 0.815,
+ "step": 1506
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6660062888957564e-05,
+ "loss": 0.9449,
+ "step": 1507
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.665541279831305e-05,
+ "loss": 0.8779,
+ "step": 1508
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6650760122782898e-05,
+ "loss": 0.8118,
+ "step": 1509
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6646104864174147e-05,
+ "loss": 0.9139,
+ "step": 1510
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.664144702429485e-05,
+ "loss": 0.9026,
+ "step": 1511
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.663678660495406e-05,
+ "loss": 0.8231,
+ "step": 1512
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.663212360796183e-05,
+ "loss": 0.926,
+ "step": 1513
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.662745803512921e-05,
+ "loss": 0.9112,
+ "step": 1514
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.662278988826826e-05,
+ "loss": 0.9311,
+ "step": 1515
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6618119169192027e-05,
+ "loss": 0.9262,
+ "step": 1516
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.661344587971457e-05,
+ "loss": 0.985,
+ "step": 1517
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6608770021650945e-05,
+ "loss": 0.9365,
+ "step": 1518
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6604091596817193e-05,
+ "loss": 0.904,
+ "step": 1519
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6599410607030363e-05,
+ "loss": 0.9535,
+ "step": 1520
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6594727054108498e-05,
+ "loss": 0.9517,
+ "step": 1521
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.659004093987064e-05,
+ "loss": 0.9498,
+ "step": 1522
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6585352266136814e-05,
+ "loss": 0.9883,
+ "step": 1523
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6580661034728055e-05,
+ "loss": 0.8979,
+ "step": 1524
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6575967247466376e-05,
+ "loss": 0.8531,
+ "step": 1525
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.657127090617479e-05,
+ "loss": 0.8912,
+ "step": 1526
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.656657201267731e-05,
+ "loss": 0.9086,
+ "step": 1527
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6561870568798927e-05,
+ "loss": 0.9344,
+ "step": 1528
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.655716657636562e-05,
+ "loss": 0.8746,
+ "step": 1529
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6552460037204382e-05,
+ "loss": 0.9204,
+ "step": 1530
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6547750953143168e-05,
+ "loss": 0.8635,
+ "step": 1531
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.654303932601093e-05,
+ "loss": 0.921,
+ "step": 1532
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6538325157637614e-05,
+ "loss": 0.9079,
+ "step": 1533
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.653360844985415e-05,
+ "loss": 0.3378,
+ "step": 1534
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.652888920449245e-05,
+ "loss": 0.9322,
+ "step": 1535
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6524167423385414e-05,
+ "loss": 0.9674,
+ "step": 1536
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.651944310836693e-05,
+ "loss": 0.9316,
+ "step": 1537
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6514716261271866e-05,
+ "loss": 0.8693,
+ "step": 1538
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6509986883936073e-05,
+ "loss": 0.8552,
+ "step": 1539
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.650525497819639e-05,
+ "loss": 0.3403,
+ "step": 1540
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6500520545890634e-05,
+ "loss": 0.9273,
+ "step": 1541
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6495783588857605e-05,
+ "loss": 0.9329,
+ "step": 1542
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.649104410893708e-05,
+ "loss": 0.334,
+ "step": 1543
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.648630210796982e-05,
+ "loss": 0.8976,
+ "step": 1544
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6481557587797562e-05,
+ "loss": 0.9389,
+ "step": 1545
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6476810550263023e-05,
+ "loss": 0.9542,
+ "step": 1546
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6472060997209898e-05,
+ "loss": 0.8812,
+ "step": 1547
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6467308930482863e-05,
+ "loss": 0.9048,
+ "step": 1548
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6462554351927558e-05,
+ "loss": 0.876,
+ "step": 1549
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6457797263390613e-05,
+ "loss": 0.8298,
+ "step": 1550
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6453037666719624e-05,
+ "loss": 0.9022,
+ "step": 1551
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6448275563763162e-05,
+ "loss": 0.8829,
+ "step": 1552
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.644351095637078e-05,
+ "loss": 0.8775,
+ "step": 1553
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6438743846392987e-05,
+ "loss": 0.9378,
+ "step": 1554
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6433974235681274e-05,
+ "loss": 0.9564,
+ "step": 1555
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6429202126088112e-05,
+ "loss": 0.8495,
+ "step": 1556
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6424427519466925e-05,
+ "loss": 0.9205,
+ "step": 1557
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.641965041767212e-05,
+ "loss": 0.8454,
+ "step": 1558
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6414870822559064e-05,
+ "loss": 0.8951,
+ "step": 1559
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6410088735984103e-05,
+ "loss": 0.9077,
+ "step": 1560
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6405304159804534e-05,
+ "loss": 0.931,
+ "step": 1561
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6400517095878644e-05,
+ "loss": 0.9085,
+ "step": 1562
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6395727546065665e-05,
+ "loss": 0.9335,
+ "step": 1563
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6390935512225806e-05,
+ "loss": 0.9383,
+ "step": 1564
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6386140996220232e-05,
+ "loss": 0.9486,
+ "step": 1565
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6381343999911088e-05,
+ "loss": 0.9645,
+ "step": 1566
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6376544525161463e-05,
+ "loss": 0.942,
+ "step": 1567
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6371742573835426e-05,
+ "loss": 0.8543,
+ "step": 1568
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.636693814779799e-05,
+ "loss": 0.9479,
+ "step": 1569
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6362131248915145e-05,
+ "loss": 0.8565,
+ "step": 1570
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6357321879053833e-05,
+ "loss": 0.9377,
+ "step": 1571
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6352510040081962e-05,
+ "loss": 0.9424,
+ "step": 1572
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.634769573386839e-05,
+ "loss": 0.8644,
+ "step": 1573
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.634287896228294e-05,
+ "loss": 0.8406,
+ "step": 1574
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6338059727196386e-05,
+ "loss": 0.9085,
+ "step": 1575
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6333238030480473e-05,
+ "loss": 0.9058,
+ "step": 1576
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6328413874007884e-05,
+ "loss": 0.942,
+ "step": 1577
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6323587259652267e-05,
+ "loss": 0.8775,
+ "step": 1578
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6318758189288227e-05,
+ "loss": 0.9459,
+ "step": 1579
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6313926664791316e-05,
+ "loss": 0.9458,
+ "step": 1580
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6309092688038047e-05,
+ "loss": 0.9369,
+ "step": 1581
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6304256260905872e-05,
+ "loss": 0.917,
+ "step": 1582
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6299417385273216e-05,
+ "loss": 0.9081,
+ "step": 1583
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.629457606301943e-05,
+ "loss": 0.3765,
+ "step": 1584
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6289732296024837e-05,
+ "loss": 0.9892,
+ "step": 1585
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6284886086170697e-05,
+ "loss": 0.9082,
+ "step": 1586
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.628003743533922e-05,
+ "loss": 0.3439,
+ "step": 1587
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6275186345413566e-05,
+ "loss": 0.9259,
+ "step": 1588
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.627033281827785e-05,
+ "loss": 0.362,
+ "step": 1589
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6265476855817116e-05,
+ "loss": 0.8515,
+ "step": 1590
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6260618459917366e-05,
+ "loss": 0.9138,
+ "step": 1591
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6255757632465553e-05,
+ "loss": 0.9615,
+ "step": 1592
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.625089437534956e-05,
+ "loss": 0.9091,
+ "step": 1593
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.624602869045822e-05,
+ "loss": 0.9077,
+ "step": 1594
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.624116057968131e-05,
+ "loss": 0.9218,
+ "step": 1595
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6236290044909543e-05,
+ "loss": 1.0128,
+ "step": 1596
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6231417088034585e-05,
+ "loss": 0.9007,
+ "step": 1597
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.622654171094904e-05,
+ "loss": 0.3385,
+ "step": 1598
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6221663915546437e-05,
+ "loss": 0.9356,
+ "step": 1599
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6216783703721265e-05,
+ "loss": 0.9317,
+ "step": 1600
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6211901077368937e-05,
+ "loss": 0.8909,
+ "step": 1601
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.620701603838581e-05,
+ "loss": 0.9236,
+ "step": 1602
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6202128588669177e-05,
+ "loss": 0.8958,
+ "step": 1603
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.619723873011727e-05,
+ "loss": 0.866,
+ "step": 1604
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6192346464629247e-05,
+ "loss": 0.8925,
+ "step": 1605
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6187451794105212e-05,
+ "loss": 0.851,
+ "step": 1606
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.61825547204462e-05,
+ "loss": 0.879,
+ "step": 1607
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6177655245554177e-05,
+ "loss": 0.8873,
+ "step": 1608
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.617275337133204e-05,
+ "loss": 0.8709,
+ "step": 1609
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6167849099683623e-05,
+ "loss": 0.8851,
+ "step": 1610
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6162942432513687e-05,
+ "loss": 0.9548,
+ "step": 1611
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6158033371727924e-05,
+ "loss": 0.9119,
+ "step": 1612
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6153121919232962e-05,
+ "loss": 0.8921,
+ "step": 1613
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.614820807693635e-05,
+ "loss": 0.9396,
+ "step": 1614
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6143291846746563e-05,
+ "loss": 0.9238,
+ "step": 1615
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.613837323057301e-05,
+ "loss": 0.993,
+ "step": 1616
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6133452230326035e-05,
+ "loss": 0.8919,
+ "step": 1617
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6128528847916883e-05,
+ "loss": 0.8905,
+ "step": 1618
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6123603085257746e-05,
+ "loss": 0.9203,
+ "step": 1619
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6118674944261732e-05,
+ "loss": 0.8348,
+ "step": 1620
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6113744426842882e-05,
+ "loss": 0.8634,
+ "step": 1621
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6108811534916137e-05,
+ "loss": 0.9083,
+ "step": 1622
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6103876270397387e-05,
+ "loss": 0.9459,
+ "step": 1623
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.609893863520343e-05,
+ "loss": 0.8999,
+ "step": 1624
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.609399863125198e-05,
+ "loss": 0.9203,
+ "step": 1625
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6089056260461687e-05,
+ "loss": 0.8976,
+ "step": 1626
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6084111524752107e-05,
+ "loss": 0.888,
+ "step": 1627
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.607916442604372e-05,
+ "loss": 0.9126,
+ "step": 1628
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6074214966257914e-05,
+ "loss": 0.9084,
+ "step": 1629
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6069263147317015e-05,
+ "loss": 0.856,
+ "step": 1630
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6064308971144236e-05,
+ "loss": 0.9364,
+ "step": 1631
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.605935243966374e-05,
+ "loss": 0.8824,
+ "step": 1632
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6054393554800574e-05,
+ "loss": 0.8943,
+ "step": 1633
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.604943231848072e-05,
+ "loss": 0.9126,
+ "step": 1634
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.604446873263106e-05,
+ "loss": 0.9869,
+ "step": 1635
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6039502799179394e-05,
+ "loss": 0.9221,
+ "step": 1636
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6034534520054435e-05,
+ "loss": 0.9395,
+ "step": 1637
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.60295638971858e-05,
+ "loss": 0.9287,
+ "step": 1638
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.602459093250403e-05,
+ "loss": 0.946,
+ "step": 1639
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.601961562794056e-05,
+ "loss": 0.8648,
+ "step": 1640
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.601463798542775e-05,
+ "loss": 0.3325,
+ "step": 1641
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6009658006898848e-05,
+ "loss": 0.8383,
+ "step": 1642
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.600467569428803e-05,
+ "loss": 0.85,
+ "step": 1643
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.599969104953036e-05,
+ "loss": 0.9851,
+ "step": 1644
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.599470407456182e-05,
+ "loss": 0.8856,
+ "step": 1645
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5989714771319297e-05,
+ "loss": 0.8561,
+ "step": 1646
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5984723141740578e-05,
+ "loss": 0.91,
+ "step": 1647
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.597972918776435e-05,
+ "loss": 0.9533,
+ "step": 1648
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5974732911330208e-05,
+ "loss": 0.9079,
+ "step": 1649
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5969734314378654e-05,
+ "loss": 0.8686,
+ "step": 1650
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5964733398851078e-05,
+ "loss": 0.9388,
+ "step": 1651
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5959730166689783e-05,
+ "loss": 0.9022,
+ "step": 1652
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5954724619837966e-05,
+ "loss": 0.869,
+ "step": 1653
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5949716760239722e-05,
+ "loss": 0.9018,
+ "step": 1654
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5944706589840046e-05,
+ "loss": 0.8919,
+ "step": 1655
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5939694110584833e-05,
+ "loss": 0.9298,
+ "step": 1656
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.593467932442087e-05,
+ "loss": 0.8993,
+ "step": 1657
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5929662233295846e-05,
+ "loss": 0.8597,
+ "step": 1658
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5924642839158334e-05,
+ "loss": 0.9543,
+ "step": 1659
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.591962114395781e-05,
+ "loss": 0.8902,
+ "step": 1660
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5914597149644654e-05,
+ "loss": 0.9517,
+ "step": 1661
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5909570858170115e-05,
+ "loss": 0.8964,
+ "step": 1662
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5904542271486346e-05,
+ "loss": 0.8226,
+ "step": 1663
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5899511391546403e-05,
+ "loss": 0.9308,
+ "step": 1664
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5894478220304215e-05,
+ "loss": 0.8725,
+ "step": 1665
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5889442759714603e-05,
+ "loss": 0.357,
+ "step": 1666
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5884405011733294e-05,
+ "loss": 0.8884,
+ "step": 1667
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.587936497831688e-05,
+ "loss": 0.8428,
+ "step": 1668
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5874322661422856e-05,
+ "loss": 0.8942,
+ "step": 1669
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5869278063009602e-05,
+ "loss": 0.9476,
+ "step": 1670
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.586423118503638e-05,
+ "loss": 0.8257,
+ "step": 1671
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.585918202946334e-05,
+ "loss": 0.9185,
+ "step": 1672
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5854130598251514e-05,
+ "loss": 0.9394,
+ "step": 1673
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5849076893362822e-05,
+ "loss": 0.9399,
+ "step": 1674
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.584402091676006e-05,
+ "loss": 0.9126,
+ "step": 1675
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5838962670406918e-05,
+ "loss": 0.9149,
+ "step": 1676
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5833902156267956e-05,
+ "loss": 0.938,
+ "step": 1677
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.582883937630862e-05,
+ "loss": 0.9592,
+ "step": 1678
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5823774332495236e-05,
+ "loss": 0.9069,
+ "step": 1679
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581870702679501e-05,
+ "loss": 0.918,
+ "step": 1680
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581363746117602e-05,
+ "loss": 0.8892,
+ "step": 1681
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.580856563760724e-05,
+ "loss": 0.349,
+ "step": 1682
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5803491558058486e-05,
+ "loss": 0.9282,
+ "step": 1683
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.579841522450049e-05,
+ "loss": 0.8366,
+ "step": 1684
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5793336638904838e-05,
+ "loss": 0.9711,
+ "step": 1685
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.578825580324399e-05,
+ "loss": 0.9039,
+ "step": 1686
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5783172719491288e-05,
+ "loss": 0.8891,
+ "step": 1687
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.577808738962094e-05,
+ "loss": 0.843,
+ "step": 1688
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.577299981560803e-05,
+ "loss": 0.3497,
+ "step": 1689
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5767909999428513e-05,
+ "loss": 0.9345,
+ "step": 1690
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.576281794305922e-05,
+ "loss": 0.93,
+ "step": 1691
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.575772364847784e-05,
+ "loss": 0.9059,
+ "step": 1692
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.575262711766294e-05,
+ "loss": 0.8892,
+ "step": 1693
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5747528352593956e-05,
+ "loss": 0.8448,
+ "step": 1694
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.574242735525119e-05,
+ "loss": 0.8748,
+ "step": 1695
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5737324127615808e-05,
+ "loss": 0.9469,
+ "step": 1696
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5732218671669847e-05,
+ "loss": 0.9469,
+ "step": 1697
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5727110989396205e-05,
+ "loss": 0.8397,
+ "step": 1698
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5722001082778645e-05,
+ "loss": 0.982,
+ "step": 1699
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5716888953801805e-05,
+ "loss": 0.9181,
+ "step": 1700
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5711774604451168e-05,
+ "loss": 0.85,
+ "step": 1701
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5706658036713093e-05,
+ "loss": 0.3376,
+ "step": 1702
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5701539252574795e-05,
+ "loss": 0.362,
+ "step": 1703
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5696418254024344e-05,
+ "loss": 0.8803,
+ "step": 1704
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.569129504305069e-05,
+ "loss": 0.904,
+ "step": 1705
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.568616962164362e-05,
+ "loss": 0.8772,
+ "step": 1706
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5681041991793788e-05,
+ "loss": 0.8893,
+ "step": 1707
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.567591215549271e-05,
+ "loss": 0.9031,
+ "step": 1708
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.567078011473276e-05,
+ "loss": 0.868,
+ "step": 1709
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5665645871507152e-05,
+ "loss": 0.841,
+ "step": 1710
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5660509427809973e-05,
+ "loss": 0.8552,
+ "step": 1711
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.565537078563616e-05,
+ "loss": 0.8433,
+ "step": 1712
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.56502299469815e-05,
+ "loss": 0.8928,
+ "step": 1713
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.564508691384264e-05,
+ "loss": 0.9244,
+ "step": 1714
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5639941688217063e-05,
+ "loss": 0.8972,
+ "step": 1715
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5634794272103126e-05,
+ "loss": 0.8691,
+ "step": 1716
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.562964466750003e-05,
+ "loss": 0.898,
+ "step": 1717
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.562449287640781e-05,
+ "loss": 0.9465,
+ "step": 1718
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5619338900827368e-05,
+ "loss": 0.8966,
+ "step": 1719
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5614182742760448e-05,
+ "loss": 0.9137,
+ "step": 1720
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5609024404209643e-05,
+ "loss": 0.9303,
+ "step": 1721
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5603863887178393e-05,
+ "loss": 0.8798,
+ "step": 1722
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5598701193670983e-05,
+ "loss": 0.9252,
+ "step": 1723
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.559353632569254e-05,
+ "loss": 0.8918,
+ "step": 1724
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5588369285249048e-05,
+ "loss": 0.8562,
+ "step": 1725
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5583200074347318e-05,
+ "loss": 0.9207,
+ "step": 1726
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.557802869499501e-05,
+ "loss": 0.8755,
+ "step": 1727
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5572855149200637e-05,
+ "loss": 0.9165,
+ "step": 1728
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5567679438973543e-05,
+ "loss": 0.9501,
+ "step": 1729
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5562501566323906e-05,
+ "loss": 0.9016,
+ "step": 1730
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.555732153326276e-05,
+ "loss": 0.9402,
+ "step": 1731
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5552139341801965e-05,
+ "loss": 0.8856,
+ "step": 1732
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.554695499395423e-05,
+ "loss": 0.8768,
+ "step": 1733
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5541768491733092e-05,
+ "loss": 0.9014,
+ "step": 1734
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5536579837152927e-05,
+ "loss": 0.8418,
+ "step": 1735
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5531389032228955e-05,
+ "loss": 0.8946,
+ "step": 1736
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.552619607897722e-05,
+ "loss": 0.9021,
+ "step": 1737
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.55210009794146e-05,
+ "loss": 0.8611,
+ "step": 1738
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5515803735558827e-05,
+ "loss": 0.9054,
+ "step": 1739
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5510604349428438e-05,
+ "loss": 0.9597,
+ "step": 1740
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.550540282304282e-05,
+ "loss": 0.8706,
+ "step": 1741
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.550019915842218e-05,
+ "loss": 0.9222,
+ "step": 1742
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.549499335758757e-05,
+ "loss": 0.9049,
+ "step": 1743
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.548978542256086e-05,
+ "loss": 0.9216,
+ "step": 1744
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5484575355364744e-05,
+ "loss": 0.906,
+ "step": 1745
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5479363158022763e-05,
+ "loss": 0.8817,
+ "step": 1746
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.547414883255927e-05,
+ "loss": 0.8898,
+ "step": 1747
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.546893238099945e-05,
+ "loss": 0.8456,
+ "step": 1748
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5463713805369312e-05,
+ "loss": 0.8614,
+ "step": 1749
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5458493107695688e-05,
+ "loss": 0.8622,
+ "step": 1750
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5453270290006237e-05,
+ "loss": 0.8739,
+ "step": 1751
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.544804535432945e-05,
+ "loss": 0.9129,
+ "step": 1752
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.544281830269462e-05,
+ "loss": 0.8913,
+ "step": 1753
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5437589137131882e-05,
+ "loss": 0.344,
+ "step": 1754
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5432357859672177e-05,
+ "loss": 0.3343,
+ "step": 1755
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.542712447234728e-05,
+ "loss": 0.9442,
+ "step": 1756
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.542188897718977e-05,
+ "loss": 0.8802,
+ "step": 1757
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5416651376233062e-05,
+ "loss": 0.9224,
+ "step": 1758
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5411411671511376e-05,
+ "loss": 0.3322,
+ "step": 1759
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5406169865059747e-05,
+ "loss": 0.8669,
+ "step": 1760
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5400925958914045e-05,
+ "loss": 0.8587,
+ "step": 1761
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5395679955110927e-05,
+ "loss": 0.9005,
+ "step": 1762
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.53904318556879e-05,
+ "loss": 0.894,
+ "step": 1763
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5385181662683244e-05,
+ "loss": 0.9441,
+ "step": 1764
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5379929378136088e-05,
+ "loss": 0.8866,
+ "step": 1765
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5374675004086353e-05,
+ "loss": 0.8596,
+ "step": 1766
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5369418542574782e-05,
+ "loss": 0.9463,
+ "step": 1767
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.536415999564292e-05,
+ "loss": 0.9096,
+ "step": 1768
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5358899365333123e-05,
+ "loss": 0.9065,
+ "step": 1769
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5353636653688563e-05,
+ "loss": 0.9263,
+ "step": 1770
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.534837186275322e-05,
+ "loss": 0.9296,
+ "step": 1771
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5343104994571877e-05,
+ "loss": 0.9103,
+ "step": 1772
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.533783605119012e-05,
+ "loss": 0.9263,
+ "step": 1773
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5332565034654344e-05,
+ "loss": 0.8902,
+ "step": 1774
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5327291947011763e-05,
+ "loss": 0.8487,
+ "step": 1775
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5322016790310373e-05,
+ "loss": 0.9495,
+ "step": 1776
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5316739566598985e-05,
+ "loss": 0.8295,
+ "step": 1777
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.531146027792722e-05,
+ "loss": 0.8741,
+ "step": 1778
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.530617892634548e-05,
+ "loss": 0.879,
+ "step": 1779
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5300895513904993e-05,
+ "loss": 0.8716,
+ "step": 1780
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.529561004265777e-05,
+ "loss": 0.3501,
+ "step": 1781
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5290322514656624e-05,
+ "loss": 0.9305,
+ "step": 1782
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5285032931955177e-05,
+ "loss": 0.8902,
+ "step": 1783
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.527974129660784e-05,
+ "loss": 0.8797,
+ "step": 1784
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.527444761066982e-05,
+ "loss": 0.9201,
+ "step": 1785
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5269151876197127e-05,
+ "loss": 0.8549,
+ "step": 1786
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5263854095246557e-05,
+ "loss": 0.9351,
+ "step": 1787
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5258554269875716e-05,
+ "loss": 0.9333,
+ "step": 1788
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5253252402142989e-05,
+ "loss": 0.914,
+ "step": 1789
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5247948494107566e-05,
+ "loss": 0.9352,
+ "step": 1790
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5242642547829416e-05,
+ "loss": 0.9375,
+ "step": 1791
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.523733456536931e-05,
+ "loss": 0.8714,
+ "step": 1792
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5232024548788813e-05,
+ "loss": 0.9665,
+ "step": 1793
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5226712500150267e-05,
+ "loss": 0.8637,
+ "step": 1794
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5221398421516816e-05,
+ "loss": 0.9001,
+ "step": 1795
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5216082314952383e-05,
+ "loss": 0.8326,
+ "step": 1796
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.521076418252168e-05,
+ "loss": 0.9145,
+ "step": 1797
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5205444026290218e-05,
+ "loss": 0.9409,
+ "step": 1798
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5200121848324276e-05,
+ "loss": 0.899,
+ "step": 1799
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5194797650690926e-05,
+ "loss": 0.893,
+ "step": 1800
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5189471435458032e-05,
+ "loss": 0.9221,
+ "step": 1801
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5184143204694231e-05,
+ "loss": 0.7968,
+ "step": 1802
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5178812960468945e-05,
+ "loss": 0.9386,
+ "step": 1803
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5173480704852379e-05,
+ "loss": 0.8234,
+ "step": 1804
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5168146439915525e-05,
+ "loss": 0.8813,
+ "step": 1805
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5162810167730144e-05,
+ "loss": 0.8989,
+ "step": 1806
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5157471890368785e-05,
+ "loss": 0.9244,
+ "step": 1807
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5152131609904773e-05,
+ "loss": 0.9251,
+ "step": 1808
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5146789328412213e-05,
+ "loss": 0.8993,
+ "step": 1809
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5141445047965984e-05,
+ "loss": 0.342,
+ "step": 1810
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5136098770641741e-05,
+ "loss": 0.9025,
+ "step": 1811
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.513075049851592e-05,
+ "loss": 0.3408,
+ "step": 1812
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5125400233665728e-05,
+ "loss": 0.8834,
+ "step": 1813
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5120047978169146e-05,
+ "loss": 0.8897,
+ "step": 1814
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5114693734104926e-05,
+ "loss": 0.8895,
+ "step": 1815
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5109337503552594e-05,
+ "loss": 0.9055,
+ "step": 1816
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5103979288592454e-05,
+ "loss": 0.8434,
+ "step": 1817
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5098619091305571e-05,
+ "loss": 0.9234,
+ "step": 1818
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5093256913773786e-05,
+ "loss": 0.7853,
+ "step": 1819
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.50878927580797e-05,
+ "loss": 0.9126,
+ "step": 1820
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5082526626306698e-05,
+ "loss": 0.3308,
+ "step": 1821
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5077158520538921e-05,
+ "loss": 0.821,
+ "step": 1822
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5071788442861277e-05,
+ "loss": 0.8598,
+ "step": 1823
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5066416395359444e-05,
+ "loss": 0.8984,
+ "step": 1824
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5061042380119864e-05,
+ "loss": 0.8945,
+ "step": 1825
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5055666399229743e-05,
+ "loss": 0.9365,
+ "step": 1826
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5050288454777047e-05,
+ "loss": 0.3325,
+ "step": 1827
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.504490854885051e-05,
+ "loss": 0.9344,
+ "step": 1828
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5039526683539627e-05,
+ "loss": 0.961,
+ "step": 1829
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5034142860934649e-05,
+ "loss": 0.8653,
+ "step": 1830
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5028757083126594e-05,
+ "loss": 0.8737,
+ "step": 1831
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5023369352207229e-05,
+ "loss": 0.8489,
+ "step": 1832
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5017979670269096e-05,
+ "loss": 0.8895,
+ "step": 1833
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.501258803940548e-05,
+ "loss": 0.9791,
+ "step": 1834
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.500719446171043e-05,
+ "loss": 0.853,
+ "step": 1835
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.500179893927875e-05,
+ "loss": 0.8926,
+ "step": 1836
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4996401474205997e-05,
+ "loss": 0.3301,
+ "step": 1837
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4991002068588484e-05,
+ "loss": 0.9411,
+ "step": 1838
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4985600724523282e-05,
+ "loss": 0.9024,
+ "step": 1839
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4980197444108205e-05,
+ "loss": 0.9483,
+ "step": 1840
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4974792229441826e-05,
+ "loss": 0.9167,
+ "step": 1841
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4969385082623473e-05,
+ "loss": 0.9055,
+ "step": 1842
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4963976005753216e-05,
+ "loss": 0.8377,
+ "step": 1843
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4958565000931877e-05,
+ "loss": 0.9333,
+ "step": 1844
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4953152070261027e-05,
+ "loss": 0.8977,
+ "step": 1845
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.494773721584299e-05,
+ "loss": 0.9427,
+ "step": 1846
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4942320439780833e-05,
+ "loss": 0.8719,
+ "step": 1847
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4936901744178367e-05,
+ "loss": 0.8691,
+ "step": 1848
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4931481131140149e-05,
+ "loss": 0.8912,
+ "step": 1849
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4926058602771484e-05,
+ "loss": 0.356,
+ "step": 1850
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4920634161178424e-05,
+ "loss": 0.8958,
+ "step": 1851
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4915207808467756e-05,
+ "loss": 0.8454,
+ "step": 1852
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4909779546747011e-05,
+ "loss": 0.9246,
+ "step": 1853
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4904349378124467e-05,
+ "loss": 0.8342,
+ "step": 1854
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.489891730470914e-05,
+ "loss": 0.913,
+ "step": 1855
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4893483328610778e-05,
+ "loss": 0.8311,
+ "step": 1856
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.488804745193988e-05,
+ "loss": 0.8267,
+ "step": 1857
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4882609676807675e-05,
+ "loss": 0.8444,
+ "step": 1858
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4877170005326136e-05,
+ "loss": 0.8312,
+ "step": 1859
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4871728439607967e-05,
+ "loss": 0.9051,
+ "step": 1860
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4866284981766607e-05,
+ "loss": 0.9088,
+ "step": 1861
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4860839633916236e-05,
+ "loss": 0.8904,
+ "step": 1862
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4855392398171762e-05,
+ "loss": 0.9397,
+ "step": 1863
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.484994327664883e-05,
+ "loss": 0.8327,
+ "step": 1864
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4844492271463814e-05,
+ "loss": 0.9035,
+ "step": 1865
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4839039384733821e-05,
+ "loss": 0.8804,
+ "step": 1866
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4833584618576695e-05,
+ "loss": 0.8932,
+ "step": 1867
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4828127975111e-05,
+ "loss": 0.9195,
+ "step": 1868
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4822669456456031e-05,
+ "loss": 0.8813,
+ "step": 1869
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4817209064731819e-05,
+ "loss": 0.9215,
+ "step": 1870
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4811746802059115e-05,
+ "loss": 0.9449,
+ "step": 1871
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.48062826705594e-05,
+ "loss": 0.8442,
+ "step": 1872
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4800816672354876e-05,
+ "loss": 0.8994,
+ "step": 1873
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4795348809568477e-05,
+ "loss": 0.8873,
+ "step": 1874
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4789879084323858e-05,
+ "loss": 0.8457,
+ "step": 1875
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4784407498745394e-05,
+ "loss": 0.9257,
+ "step": 1876
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.477893405495819e-05,
+ "loss": 0.9089,
+ "step": 1877
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4773458755088068e-05,
+ "loss": 0.9125,
+ "step": 1878
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4767981601261567e-05,
+ "loss": 0.9097,
+ "step": 1879
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4762502595605957e-05,
+ "loss": 0.877,
+ "step": 1880
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4757021740249213e-05,
+ "loss": 0.8929,
+ "step": 1881
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4751539037320044e-05,
+ "loss": 0.877,
+ "step": 1882
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4746054488947863e-05,
+ "loss": 0.9186,
+ "step": 1883
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4740568097262811e-05,
+ "loss": 0.9156,
+ "step": 1884
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.473507986439573e-05,
+ "loss": 0.8942,
+ "step": 1885
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4729589792478193e-05,
+ "loss": 0.9098,
+ "step": 1886
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4724097883642482e-05,
+ "loss": 0.9079,
+ "step": 1887
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4718604140021588e-05,
+ "loss": 0.8696,
+ "step": 1888
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.471310856374922e-05,
+ "loss": 0.8806,
+ "step": 1889
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.470761115695979e-05,
+ "loss": 0.8841,
+ "step": 1890
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4702111921788437e-05,
+ "loss": 0.3691,
+ "step": 1891
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4696610860370997e-05,
+ "loss": 0.8541,
+ "step": 1892
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4691107974844015e-05,
+ "loss": 0.8623,
+ "step": 1893
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.468560326734475e-05,
+ "loss": 0.9252,
+ "step": 1894
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4680096740011172e-05,
+ "loss": 0.9047,
+ "step": 1895
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4674588394981948e-05,
+ "loss": 0.8317,
+ "step": 1896
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4669078234396454e-05,
+ "loss": 0.8868,
+ "step": 1897
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4663566260394775e-05,
+ "loss": 0.9246,
+ "step": 1898
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4658052475117704e-05,
+ "loss": 0.9418,
+ "step": 1899
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4652536880706723e-05,
+ "loss": 0.977,
+ "step": 1900
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4647019479304028e-05,
+ "loss": 0.935,
+ "step": 1901
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4641500273052516e-05,
+ "loss": 0.881,
+ "step": 1902
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.463597926409578e-05,
+ "loss": 0.8564,
+ "step": 1903
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4630456454578122e-05,
+ "loss": 0.9335,
+ "step": 1904
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.462493184664453e-05,
+ "loss": 0.8867,
+ "step": 1905
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4619405442440702e-05,
+ "loss": 0.8895,
+ "step": 1906
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4613877244113033e-05,
+ "loss": 0.8933,
+ "step": 1907
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4608347253808605e-05,
+ "loss": 0.3472,
+ "step": 1908
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.460281547367521e-05,
+ "loss": 0.9395,
+ "step": 1909
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4597281905861318e-05,
+ "loss": 0.9227,
+ "step": 1910
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4591746552516109e-05,
+ "loss": 0.3281,
+ "step": 1911
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4586209415789452e-05,
+ "loss": 0.8451,
+ "step": 1912
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4580670497831904e-05,
+ "loss": 0.9553,
+ "step": 1913
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4575129800794718e-05,
+ "loss": 0.9058,
+ "step": 1914
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4569587326829834e-05,
+ "loss": 0.9239,
+ "step": 1915
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4564043078089891e-05,
+ "loss": 0.8449,
+ "step": 1916
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4558497056728205e-05,
+ "loss": 0.9244,
+ "step": 1917
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4552949264898795e-05,
+ "loss": 0.8445,
+ "step": 1918
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4547399704756348e-05,
+ "loss": 0.8401,
+ "step": 1919
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4541848378456255e-05,
+ "loss": 0.8877,
+ "step": 1920
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4536295288154594e-05,
+ "loss": 0.9163,
+ "step": 1921
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4530740436008111e-05,
+ "loss": 0.8836,
+ "step": 1922
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.452518382417425e-05,
+ "loss": 0.9343,
+ "step": 1923
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4519625454811135e-05,
+ "loss": 0.8714,
+ "step": 1924
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4514065330077575e-05,
+ "loss": 0.9157,
+ "step": 1925
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4508503452133053e-05,
+ "loss": 0.8121,
+ "step": 1926
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4502939823137744e-05,
+ "loss": 0.9604,
+ "step": 1927
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4497374445252496e-05,
+ "loss": 0.8782,
+ "step": 1928
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4491807320638835e-05,
+ "loss": 0.9134,
+ "step": 1929
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4486238451458972e-05,
+ "loss": 0.8633,
+ "step": 1930
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4480667839875786e-05,
+ "loss": 0.9408,
+ "step": 1931
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4475095488052843e-05,
+ "loss": 0.8734,
+ "step": 1932
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4469521398154381e-05,
+ "loss": 0.927,
+ "step": 1933
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4463945572345308e-05,
+ "loss": 0.898,
+ "step": 1934
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4458368012791213e-05,
+ "loss": 0.3371,
+ "step": 1935
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4452788721658355e-05,
+ "loss": 0.9782,
+ "step": 1936
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4447207701113669e-05,
+ "loss": 0.8966,
+ "step": 1937
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4441624953324755e-05,
+ "loss": 0.9522,
+ "step": 1938
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4436040480459891e-05,
+ "loss": 0.9068,
+ "step": 1939
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.443045428468802e-05,
+ "loss": 0.8675,
+ "step": 1940
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4424866368178761e-05,
+ "loss": 0.8399,
+ "step": 1941
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.441927673310239e-05,
+ "loss": 0.8841,
+ "step": 1942
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4413685381629855e-05,
+ "loss": 0.9024,
+ "step": 1943
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.440809231593278e-05,
+ "loss": 0.8847,
+ "step": 1944
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4402497538183444e-05,
+ "loss": 0.8777,
+ "step": 1945
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4396901050554794e-05,
+ "loss": 0.9327,
+ "step": 1946
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4391302855220442e-05,
+ "loss": 0.8859,
+ "step": 1947
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4385702954354662e-05,
+ "loss": 0.8798,
+ "step": 1948
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.438010135013239e-05,
+ "loss": 0.9352,
+ "step": 1949
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4374498044729225e-05,
+ "loss": 0.3367,
+ "step": 1950
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4368893040321428e-05,
+ "loss": 0.9483,
+ "step": 1951
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4363286339085915e-05,
+ "loss": 0.8883,
+ "step": 1952
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.435767794320027e-05,
+ "loss": 0.9052,
+ "step": 1953
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4352067854842724e-05,
+ "loss": 0.8696,
+ "step": 1954
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.434645607619217e-05,
+ "loss": 0.9393,
+ "step": 1955
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.434084260942816e-05,
+ "loss": 0.9083,
+ "step": 1956
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4335227456730902e-05,
+ "loss": 0.3333,
+ "step": 1957
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4329610620281253e-05,
+ "loss": 1.0002,
+ "step": 1958
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4323992102260733e-05,
+ "loss": 0.8595,
+ "step": 1959
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4318371904851502e-05,
+ "loss": 0.8919,
+ "step": 1960
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4312750030236382e-05,
+ "loss": 0.8652,
+ "step": 1961
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4307126480598852e-05,
+ "loss": 0.8431,
+ "step": 1962
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4301501258123024e-05,
+ "loss": 0.9213,
+ "step": 1963
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4295874364993672e-05,
+ "loss": 0.9258,
+ "step": 1964
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4290245803396221e-05,
+ "loss": 0.9051,
+ "step": 1965
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4284615575516737e-05,
+ "loss": 0.8925,
+ "step": 1966
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4278983683541934e-05,
+ "loss": 0.3409,
+ "step": 1967
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4273350129659173e-05,
+ "loss": 0.872,
+ "step": 1968
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4267714916056465e-05,
+ "loss": 0.8765,
+ "step": 1969
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.426207804492246e-05,
+ "loss": 0.8701,
+ "step": 1970
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4256439518446456e-05,
+ "loss": 0.3227,
+ "step": 1971
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4250799338818388e-05,
+ "loss": 0.8665,
+ "step": 1972
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.424515750822884e-05,
+ "loss": 0.8541,
+ "step": 1973
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4239514028869032e-05,
+ "loss": 0.9197,
+ "step": 1974
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4233868902930827e-05,
+ "loss": 0.8156,
+ "step": 1975
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4228222132606729e-05,
+ "loss": 0.8795,
+ "step": 1976
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4222573720089874e-05,
+ "loss": 0.9272,
+ "step": 1977
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4216923667574042e-05,
+ "loss": 0.8261,
+ "step": 1978
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4211271977253653e-05,
+ "loss": 0.9354,
+ "step": 1979
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4205618651323753e-05,
+ "loss": 0.9077,
+ "step": 1980
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4199963691980027e-05,
+ "loss": 0.8562,
+ "step": 1981
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4194307101418805e-05,
+ "loss": 0.917,
+ "step": 1982
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4188648881837033e-05,
+ "loss": 0.919,
+ "step": 1983
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4182989035432299e-05,
+ "loss": 0.8722,
+ "step": 1984
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4177327564402825e-05,
+ "loss": 0.8983,
+ "step": 1985
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4171664470947464e-05,
+ "loss": 0.9448,
+ "step": 1986
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.416599975726569e-05,
+ "loss": 0.9818,
+ "step": 1987
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4160333425557616e-05,
+ "loss": 0.8398,
+ "step": 1988
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4154665478023977e-05,
+ "loss": 0.8986,
+ "step": 1989
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4148995916866139e-05,
+ "loss": 0.8588,
+ "step": 1990
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.41433247442861e-05,
+ "loss": 0.81,
+ "step": 1991
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4137651962486472e-05,
+ "loss": 0.3643,
+ "step": 1992
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4131977573670499e-05,
+ "loss": 0.931,
+ "step": 1993
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.412630158004205e-05,
+ "loss": 0.9019,
+ "step": 1994
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4120623983805617e-05,
+ "loss": 0.9074,
+ "step": 1995
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4114944787166307e-05,
+ "loss": 0.9205,
+ "step": 1996
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4109263992329858e-05,
+ "loss": 0.9037,
+ "step": 1997
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4103581601502629e-05,
+ "loss": 0.3415,
+ "step": 1998
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.409789761689159e-05,
+ "loss": 0.8838,
+ "step": 1999
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4092212040704336e-05,
+ "loss": 0.8955,
+ "step": 2000
+ }
+ ],
+ "logging_steps": 1.0,
+ "max_steps": 5197,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 500,
+ "total_flos": 5546790364643328.0,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/training_args.bin b/llava-v1.5-7b-concat-16/checkpoint-2000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b5632db25e85f4a6440989c9cf4e5eb45e67ccd6
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7e1397f63ab71c83d4546fd5cc220108e4e3680c17b2f7501e2a09ab729de344
+size 6712
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2000/zero_to_fp32.py b/llava-v1.5-7b-concat-16/checkpoint-2000/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..c98caae31534368be22b67fc4ae906836c992a8d
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2000/zero_to_fp32.py
@@ -0,0 +1,587 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+ # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+ # param, re-consolidating each param, while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+ - ``model`: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir, args.output_file, tag=args.tag)
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/config.json b/llava-v1.5-7b-concat-16/checkpoint-2500/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..5b87d31c757ff1906899f1e3a1d047752a0c5005
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/config.json
@@ -0,0 +1,44 @@
+{
+ "_name_or_path": "lmsys/vicuna-7b-v1.5",
+ "architectures": [
+ "LlavaLlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 4096,
+ "mm_hidden_size": 1536,
+ "mm_patch_merge_type": "flat",
+ "mm_projector_lr": null,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "Leonardo6/clip-12m-16-roberta4",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.37.2",
+ "tune_mm_mlp_adapter": false,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vocab_size": 32000
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/generation_config.json b/llava-v1.5-7b-concat-16/checkpoint-2500/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f686e33d0dd24a8bc304bf932f5bc12717579f0b
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/generation_config.json
@@ -0,0 +1,11 @@
+{
+ "attn_implementation": "flash_attention_2",
+ "bos_token_id": 1,
+ "do_sample": true,
+ "eos_token_id": 2,
+ "max_length": 4096,
+ "pad_token_id": 0,
+ "temperature": 0.9,
+ "top_p": 0.6,
+ "transformers_version": "4.37.2"
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/global_step2500/zero_pp_rank_0_mp_rank_00_model_states.pt b/llava-v1.5-7b-concat-16/checkpoint-2500/global_step2500/zero_pp_rank_0_mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..54acd49b7d8329115e55dd6457c914153e9ff2db
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/global_step2500/zero_pp_rank_0_mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fdd03bdd89ef3c583d007ffc2c7a69fa2fa891c3ea5aa2d5bbf194ab20e9c598
+size 86606876
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/global_step2500/zero_pp_rank_1_mp_rank_00_model_states.pt b/llava-v1.5-7b-concat-16/checkpoint-2500/global_step2500/zero_pp_rank_1_mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..81f65eab752fdd14d8619a6150293e2d3fb133d5
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/global_step2500/zero_pp_rank_1_mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:95043bea67ee177f0a76747c46b2667e00806b98e88f9aa72dc8949ae1c4cdf5
+size 86606876
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/global_step2500/zero_pp_rank_2_mp_rank_00_model_states.pt b/llava-v1.5-7b-concat-16/checkpoint-2500/global_step2500/zero_pp_rank_2_mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..9422a749d4c5762ac5c037451691a9cd08150d25
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/global_step2500/zero_pp_rank_2_mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d8e0e67973b9662ddbbd9e0638efad042de0fed8b938ae14938effe5b6c92b5a
+size 86606876
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/global_step2500/zero_pp_rank_3_mp_rank_00_model_states.pt b/llava-v1.5-7b-concat-16/checkpoint-2500/global_step2500/zero_pp_rank_3_mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..b001f23b9fa0ce490fa0502172d7b5d7df3b095a
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/global_step2500/zero_pp_rank_3_mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:65198253f4afb6789eaf1aadeea0c005c5896b1a511e4dcd27877970a39968fb
+size 86606876
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/latest b/llava-v1.5-7b-concat-16/checkpoint-2500/latest
new file mode 100644
index 0000000000000000000000000000000000000000..98f8bed9a5485ee900d9931cc06950de69499848
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/latest
@@ -0,0 +1 @@
+global_step2500
\ No newline at end of file
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/model.safetensors.index.json b/llava-v1.5-7b-concat-16/checkpoint-2500/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..51cd1fe67b08db18738439b039f9eec8e67fa02f
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/model.safetensors.index.json
@@ -0,0 +1,701 @@
+{
+ "metadata": {
+ "total_size": 13867362304
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00003-of-00003.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.mm_projector.0.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.0.weight": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.weight": "model-00003-of-00003.safetensors",
+ "model.norm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.embeddings.class_embedding": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.embeddings.patch_embedding.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.embeddings.position_embedding.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.post_layernorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.post_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.pre_layrnorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.pre_layrnorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.cls_token": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.patch_embeddings.projection.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.patch_embeddings.projection.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.position_embeddings": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.layernorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.pooler.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.pooler.dense.weight": "model-00003-of-00003.safetensors"
+ }
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/rng_state_0.pth b/llava-v1.5-7b-concat-16/checkpoint-2500/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..44bb770e4c85b7b758a6b2962384781d026daabd
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:af2966def51ea1ab87d97a757bd22e7f72001f21baee1a67abfc367e92e2e402
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/rng_state_1.pth b/llava-v1.5-7b-concat-16/checkpoint-2500/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..75cd02b1fceb1b3b1aae40cf4857ce2cea6fd436
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8cf5883933ffd2749908af2fffabf58c748ecc9afbc507bfa1868172477bbf0c
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/rng_state_2.pth b/llava-v1.5-7b-concat-16/checkpoint-2500/rng_state_2.pth
new file mode 100644
index 0000000000000000000000000000000000000000..127a0ab4fb3652fab0edcb4ecc63af17870be47c
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/rng_state_2.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b4f7265b828abac3132886540e39210cab53edc42ddf0389bd517ccd5c1ca42d
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/rng_state_3.pth b/llava-v1.5-7b-concat-16/checkpoint-2500/rng_state_3.pth
new file mode 100644
index 0000000000000000000000000000000000000000..a696a927c56c2b5ca8cb6f3d71f9ca36a1ae9fea
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/rng_state_3.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1ebdec8d90b17c1d6090b2bc79535cba013a72aa00b297c128236362564f916
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/scheduler.pt b/llava-v1.5-7b-concat-16/checkpoint-2500/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..b09d54d4ee96b5e63bcf9777b6576af1199ae68c
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:addeb49cc47cacee976545ab8d41e83df3ec0d4b6f6e50ddd845c495dd419a0c
+size 1064
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/special_tokens_map.json b/llava-v1.5-7b-concat-16/checkpoint-2500/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..14761dcf1466dc232bd41de9c21d4c617b15755e
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/special_tokens_map.json
@@ -0,0 +1,24 @@
+{
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<unk>",
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/tokenizer.model b/llava-v1.5-7b-concat-16/checkpoint-2500/tokenizer.model
new file mode 100644
index 0000000000000000000000000000000000000000..6c00c742ce03c627d6cd5b795984876fa49fa899
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/tokenizer.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/tokenizer_config.json b/llava-v1.5-7b-concat-16/checkpoint-2500/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..2d53c0f8edb049fa98763ee75652fafa68bf7f42
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/tokenizer_config.json
@@ -0,0 +1,42 @@
+{
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "</s>",
+ "legacy": false,
+ "model_max_length": 2048,
+ "pad_token": "<unk>",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": false
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/trainer_state.json b/llava-v1.5-7b-concat-16/checkpoint-2500/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..c503d2992d363938aa40b0fe491f23ac06e67990
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/trainer_state.json
@@ -0,0 +1,15021 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 0.480977345967005,
+ "eval_steps": 500,
+ "global_step": 2500,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.282051282051282e-07,
+ "loss": 1.437,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.564102564102564e-07,
+ "loss": 1.4396,
+ "step": 2
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.846153846153847e-07,
+ "loss": 1.4201,
+ "step": 3
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 5.128205128205128e-07,
+ "loss": 1.4526,
+ "step": 4
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 6.41025641025641e-07,
+ "loss": 1.4033,
+ "step": 5
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 7.692307692307694e-07,
+ "loss": 1.4341,
+ "step": 6
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 8.974358974358975e-07,
+ "loss": 1.455,
+ "step": 7
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.0256410256410257e-06,
+ "loss": 1.4195,
+ "step": 8
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.153846153846154e-06,
+ "loss": 1.4367,
+ "step": 9
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.282051282051282e-06,
+ "loss": 1.3549,
+ "step": 10
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.4102564102564104e-06,
+ "loss": 1.3929,
+ "step": 11
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.5384615384615387e-06,
+ "loss": 1.3577,
+ "step": 12
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.6666666666666667e-06,
+ "loss": 1.3198,
+ "step": 13
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.794871794871795e-06,
+ "loss": 1.242,
+ "step": 14
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.9230769230769234e-06,
+ "loss": 1.2693,
+ "step": 15
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.0512820512820513e-06,
+ "loss": 1.3043,
+ "step": 16
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.1794871794871797e-06,
+ "loss": 1.2034,
+ "step": 17
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.307692307692308e-06,
+ "loss": 1.1896,
+ "step": 18
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.435897435897436e-06,
+ "loss": 1.2483,
+ "step": 19
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.564102564102564e-06,
+ "loss": 1.1324,
+ "step": 20
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.6923076923076923e-06,
+ "loss": 1.2191,
+ "step": 21
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.8205128205128207e-06,
+ "loss": 1.1962,
+ "step": 22
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.948717948717949e-06,
+ "loss": 1.125,
+ "step": 23
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.0769230769230774e-06,
+ "loss": 1.2311,
+ "step": 24
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.205128205128206e-06,
+ "loss": 1.1687,
+ "step": 25
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.3333333333333333e-06,
+ "loss": 1.1214,
+ "step": 26
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.4615384615384617e-06,
+ "loss": 1.1449,
+ "step": 27
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.58974358974359e-06,
+ "loss": 1.139,
+ "step": 28
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.7179487179487184e-06,
+ "loss": 1.0864,
+ "step": 29
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.846153846153847e-06,
+ "loss": 1.1032,
+ "step": 30
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.974358974358974e-06,
+ "loss": 1.1475,
+ "step": 31
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.102564102564103e-06,
+ "loss": 1.0742,
+ "step": 32
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.230769230769231e-06,
+ "loss": 1.1101,
+ "step": 33
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.358974358974359e-06,
+ "loss": 1.0727,
+ "step": 34
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.487179487179488e-06,
+ "loss": 1.0478,
+ "step": 35
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.615384615384616e-06,
+ "loss": 1.099,
+ "step": 36
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.743589743589744e-06,
+ "loss": 0.3001,
+ "step": 37
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.871794871794872e-06,
+ "loss": 1.095,
+ "step": 38
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5e-06,
+ "loss": 1.0828,
+ "step": 39
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.128205128205128e-06,
+ "loss": 1.0715,
+ "step": 40
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.256410256410257e-06,
+ "loss": 1.0794,
+ "step": 41
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.384615384615385e-06,
+ "loss": 1.1222,
+ "step": 42
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.512820512820514e-06,
+ "loss": 1.0315,
+ "step": 43
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.641025641025641e-06,
+ "loss": 1.0473,
+ "step": 44
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.769230769230769e-06,
+ "loss": 1.1067,
+ "step": 45
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.897435897435898e-06,
+ "loss": 1.0335,
+ "step": 46
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.025641025641026e-06,
+ "loss": 1.0009,
+ "step": 47
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.153846153846155e-06,
+ "loss": 1.0086,
+ "step": 48
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.282051282051282e-06,
+ "loss": 1.0027,
+ "step": 49
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.410256410256412e-06,
+ "loss": 1.0066,
+ "step": 50
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.538461538461539e-06,
+ "loss": 1.0375,
+ "step": 51
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.666666666666667e-06,
+ "loss": 1.0491,
+ "step": 52
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.794871794871796e-06,
+ "loss": 1.0522,
+ "step": 53
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.923076923076923e-06,
+ "loss": 0.9977,
+ "step": 54
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.051282051282053e-06,
+ "loss": 1.0516,
+ "step": 55
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.17948717948718e-06,
+ "loss": 0.3065,
+ "step": 56
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.307692307692308e-06,
+ "loss": 1.057,
+ "step": 57
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.435897435897437e-06,
+ "loss": 1.0589,
+ "step": 58
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.564102564102564e-06,
+ "loss": 1.0796,
+ "step": 59
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.692307692307694e-06,
+ "loss": 1.0433,
+ "step": 60
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.820512820512822e-06,
+ "loss": 0.9848,
+ "step": 61
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.948717948717949e-06,
+ "loss": 1.0166,
+ "step": 62
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.076923076923077e-06,
+ "loss": 0.9902,
+ "step": 63
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.205128205128205e-06,
+ "loss": 1.0357,
+ "step": 64
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.333333333333334e-06,
+ "loss": 0.9981,
+ "step": 65
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.461538461538462e-06,
+ "loss": 0.9887,
+ "step": 66
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.58974358974359e-06,
+ "loss": 0.9445,
+ "step": 67
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.717948717948719e-06,
+ "loss": 1.0034,
+ "step": 68
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.846153846153847e-06,
+ "loss": 0.9865,
+ "step": 69
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.974358974358976e-06,
+ "loss": 1.0095,
+ "step": 70
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.102564102564104e-06,
+ "loss": 0.988,
+ "step": 71
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.230769230769232e-06,
+ "loss": 0.9673,
+ "step": 72
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.358974358974359e-06,
+ "loss": 1.0383,
+ "step": 73
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.487179487179487e-06,
+ "loss": 0.9842,
+ "step": 74
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.615384615384616e-06,
+ "loss": 0.9988,
+ "step": 75
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.743589743589744e-06,
+ "loss": 0.9715,
+ "step": 76
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.871794871794872e-06,
+ "loss": 0.9306,
+ "step": 77
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1e-05,
+ "loss": 1.0179,
+ "step": 78
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.012820512820513e-05,
+ "loss": 1.0813,
+ "step": 79
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0256410256410256e-05,
+ "loss": 0.9825,
+ "step": 80
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0384615384615386e-05,
+ "loss": 1.0437,
+ "step": 81
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0512820512820514e-05,
+ "loss": 1.0863,
+ "step": 82
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0641025641025643e-05,
+ "loss": 1.0367,
+ "step": 83
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.076923076923077e-05,
+ "loss": 0.9808,
+ "step": 84
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0897435897435898e-05,
+ "loss": 0.9815,
+ "step": 85
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1025641025641028e-05,
+ "loss": 1.0001,
+ "step": 86
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1153846153846154e-05,
+ "loss": 1.0443,
+ "step": 87
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1282051282051283e-05,
+ "loss": 1.0108,
+ "step": 88
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1410256410256411e-05,
+ "loss": 0.2945,
+ "step": 89
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1538461538461538e-05,
+ "loss": 0.9728,
+ "step": 90
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1666666666666668e-05,
+ "loss": 1.0072,
+ "step": 91
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1794871794871796e-05,
+ "loss": 1.0504,
+ "step": 92
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1923076923076925e-05,
+ "loss": 1.0167,
+ "step": 93
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2051282051282051e-05,
+ "loss": 0.9801,
+ "step": 94
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.217948717948718e-05,
+ "loss": 0.307,
+ "step": 95
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.230769230769231e-05,
+ "loss": 0.9832,
+ "step": 96
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2435897435897436e-05,
+ "loss": 1.0245,
+ "step": 97
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2564102564102565e-05,
+ "loss": 1.016,
+ "step": 98
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2692307692307693e-05,
+ "loss": 0.9245,
+ "step": 99
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2820512820512823e-05,
+ "loss": 0.2833,
+ "step": 100
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.294871794871795e-05,
+ "loss": 0.9694,
+ "step": 101
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3076923076923078e-05,
+ "loss": 1.0847,
+ "step": 102
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3205128205128207e-05,
+ "loss": 0.9805,
+ "step": 103
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3333333333333333e-05,
+ "loss": 1.0026,
+ "step": 104
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3461538461538463e-05,
+ "loss": 0.2628,
+ "step": 105
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3589743589743592e-05,
+ "loss": 0.9652,
+ "step": 106
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3717948717948718e-05,
+ "loss": 1.0551,
+ "step": 107
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3846153846153847e-05,
+ "loss": 0.9897,
+ "step": 108
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3974358974358975e-05,
+ "loss": 1.0074,
+ "step": 109
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4102564102564105e-05,
+ "loss": 0.9967,
+ "step": 110
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4230769230769232e-05,
+ "loss": 0.9988,
+ "step": 111
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.435897435897436e-05,
+ "loss": 1.07,
+ "step": 112
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4487179487179489e-05,
+ "loss": 0.9754,
+ "step": 113
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4615384615384615e-05,
+ "loss": 1.022,
+ "step": 114
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4743589743589745e-05,
+ "loss": 0.9851,
+ "step": 115
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4871794871794874e-05,
+ "loss": 0.2876,
+ "step": 116
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5000000000000002e-05,
+ "loss": 1.0329,
+ "step": 117
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5128205128205129e-05,
+ "loss": 0.9843,
+ "step": 118
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5256410256410257e-05,
+ "loss": 1.0554,
+ "step": 119
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5384615384615387e-05,
+ "loss": 0.9708,
+ "step": 120
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5512820512820516e-05,
+ "loss": 1.0252,
+ "step": 121
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5641025641025644e-05,
+ "loss": 1.0464,
+ "step": 122
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.576923076923077e-05,
+ "loss": 0.9278,
+ "step": 123
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5897435897435897e-05,
+ "loss": 1.0028,
+ "step": 124
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.602564102564103e-05,
+ "loss": 1.0319,
+ "step": 125
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6153846153846154e-05,
+ "loss": 0.9877,
+ "step": 126
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6282051282051282e-05,
+ "loss": 1.067,
+ "step": 127
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.641025641025641e-05,
+ "loss": 0.8855,
+ "step": 128
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.653846153846154e-05,
+ "loss": 0.9933,
+ "step": 129
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6666666666666667e-05,
+ "loss": 1.0038,
+ "step": 130
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6794871794871796e-05,
+ "loss": 1.0337,
+ "step": 131
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6923076923076924e-05,
+ "loss": 0.9185,
+ "step": 132
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7051282051282053e-05,
+ "loss": 0.9293,
+ "step": 133
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.717948717948718e-05,
+ "loss": 0.9535,
+ "step": 134
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.730769230769231e-05,
+ "loss": 0.9931,
+ "step": 135
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7435897435897438e-05,
+ "loss": 0.934,
+ "step": 136
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7564102564102566e-05,
+ "loss": 0.9966,
+ "step": 137
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7692307692307694e-05,
+ "loss": 1.018,
+ "step": 138
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7820512820512823e-05,
+ "loss": 0.9646,
+ "step": 139
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.794871794871795e-05,
+ "loss": 1.0316,
+ "step": 140
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.807692307692308e-05,
+ "loss": 1.0237,
+ "step": 141
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8205128205128208e-05,
+ "loss": 1.0058,
+ "step": 142
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8333333333333333e-05,
+ "loss": 1.0256,
+ "step": 143
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8461538461538465e-05,
+ "loss": 0.9973,
+ "step": 144
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8589743589743593e-05,
+ "loss": 0.9952,
+ "step": 145
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8717948717948718e-05,
+ "loss": 0.9886,
+ "step": 146
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8846153846153846e-05,
+ "loss": 0.9792,
+ "step": 147
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8974358974358975e-05,
+ "loss": 0.9918,
+ "step": 148
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9102564102564106e-05,
+ "loss": 0.9943,
+ "step": 149
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.923076923076923e-05,
+ "loss": 1.0113,
+ "step": 150
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.935897435897436e-05,
+ "loss": 1.0034,
+ "step": 151
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9487179487179488e-05,
+ "loss": 0.2836,
+ "step": 152
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9615384615384617e-05,
+ "loss": 0.9962,
+ "step": 153
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9743589743589745e-05,
+ "loss": 0.9848,
+ "step": 154
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9871794871794873e-05,
+ "loss": 0.9459,
+ "step": 155
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 2e-05,
+ "loss": 1.06,
+ "step": 156
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999998058057616e-05,
+ "loss": 1.0001,
+ "step": 157
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999992232231216e-05,
+ "loss": 1.0235,
+ "step": 158
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999998252252306e-05,
+ "loss": 0.9819,
+ "step": 159
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999968928936924e-05,
+ "loss": 0.9859,
+ "step": 160
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999995145147809e-05,
+ "loss": 0.9607,
+ "step": 161
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999930090153335e-05,
+ "loss": 0.9999,
+ "step": 162
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999904844970963e-05,
+ "loss": 0.9986,
+ "step": 163
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999987571594078e-05,
+ "loss": 0.337,
+ "step": 164
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.99998427030741e-05,
+ "loss": 0.9978,
+ "step": 165
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999980580638374e-05,
+ "loss": 1.0083,
+ "step": 166
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999976502588403e-05,
+ "loss": 0.9703,
+ "step": 167
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999720361590812e-05,
+ "loss": 0.9653,
+ "step": 168
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999671813521435e-05,
+ "loss": 0.9899,
+ "step": 169
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999961938169475e-05,
+ "loss": 0.9462,
+ "step": 170
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999563066131124e-05,
+ "loss": 0.8944,
+ "step": 171
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999502866852427e-05,
+ "loss": 1.0217,
+ "step": 172
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999943878388204e-05,
+ "loss": 0.9505,
+ "step": 173
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999370817244853e-05,
+ "loss": 0.9858,
+ "step": 174
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999298966967264e-05,
+ "loss": 1.0156,
+ "step": 175
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999223233077178e-05,
+ "loss": 1.001,
+ "step": 176
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999914361560401e-05,
+ "loss": 0.9823,
+ "step": 177
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999060114578682e-05,
+ "loss": 0.9295,
+ "step": 178
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998972730033624e-05,
+ "loss": 0.9641,
+ "step": 179
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998881462002778e-05,
+ "loss": 0.2889,
+ "step": 180
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998786310521585e-05,
+ "loss": 0.9556,
+ "step": 181
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998687275627008e-05,
+ "loss": 1.0336,
+ "step": 182
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998584357357503e-05,
+ "loss": 0.9954,
+ "step": 183
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998477555753054e-05,
+ "loss": 0.958,
+ "step": 184
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998366870855134e-05,
+ "loss": 1.0338,
+ "step": 185
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999825230270673e-05,
+ "loss": 0.982,
+ "step": 186
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998133851352342e-05,
+ "loss": 0.3328,
+ "step": 187
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998011516837974e-05,
+ "loss": 0.9857,
+ "step": 188
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999788529921114e-05,
+ "loss": 0.917,
+ "step": 189
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999775519852086e-05,
+ "loss": 0.2945,
+ "step": 190
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999762121481767e-05,
+ "loss": 0.9773,
+ "step": 191
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.99974833481536e-05,
+ "loss": 0.9617,
+ "step": 192
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997341598582197e-05,
+ "loss": 1.0578,
+ "step": 193
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997195966158518e-05,
+ "loss": 0.9984,
+ "step": 194
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997046450939122e-05,
+ "loss": 0.9619,
+ "step": 195
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996893052982083e-05,
+ "loss": 1.0214,
+ "step": 196
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996735772346973e-05,
+ "loss": 0.9952,
+ "step": 197
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996574609094887e-05,
+ "loss": 1.0151,
+ "step": 198
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996409563288404e-05,
+ "loss": 0.9638,
+ "step": 199
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996240634991645e-05,
+ "loss": 0.9891,
+ "step": 200
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996067824270204e-05,
+ "loss": 1.0223,
+ "step": 201
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999589113119121e-05,
+ "loss": 1.0309,
+ "step": 202
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995710555823277e-05,
+ "loss": 1.0079,
+ "step": 203
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999552609823655e-05,
+ "loss": 0.9522,
+ "step": 204
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999533775850266e-05,
+ "loss": 0.3102,
+ "step": 205
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995145536694764e-05,
+ "loss": 0.9981,
+ "step": 206
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994949432887512e-05,
+ "loss": 0.9842,
+ "step": 207
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999474944715708e-05,
+ "loss": 0.9885,
+ "step": 208
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994545579581125e-05,
+ "loss": 1.0181,
+ "step": 209
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994337830238836e-05,
+ "loss": 0.9843,
+ "step": 210
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994126199210897e-05,
+ "loss": 0.9757,
+ "step": 211
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999391068657951e-05,
+ "loss": 0.9023,
+ "step": 212
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993691292428364e-05,
+ "loss": 0.9472,
+ "step": 213
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993468016842684e-05,
+ "loss": 0.9836,
+ "step": 214
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999324085990918e-05,
+ "loss": 0.9871,
+ "step": 215
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993009821716076e-05,
+ "loss": 1.0082,
+ "step": 216
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992774902353104e-05,
+ "loss": 0.2744,
+ "step": 217
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999253610191151e-05,
+ "loss": 0.3193,
+ "step": 218
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999229342048404e-05,
+ "loss": 1.0274,
+ "step": 219
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992046858164942e-05,
+ "loss": 0.2843,
+ "step": 220
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999179641504999e-05,
+ "loss": 0.982,
+ "step": 221
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991542091236438e-05,
+ "loss": 0.976,
+ "step": 222
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991283886823075e-05,
+ "loss": 1.0374,
+ "step": 223
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991021801910177e-05,
+ "loss": 1.0289,
+ "step": 224
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999075583659954e-05,
+ "loss": 0.9761,
+ "step": 225
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999048599099446e-05,
+ "loss": 0.2977,
+ "step": 226
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990212265199738e-05,
+ "loss": 0.9407,
+ "step": 227
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.998993465932169e-05,
+ "loss": 1.0007,
+ "step": 228
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989653173468137e-05,
+ "loss": 0.9877,
+ "step": 229
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.99893678077484e-05,
+ "loss": 0.9289,
+ "step": 230
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989078562273313e-05,
+ "loss": 0.9585,
+ "step": 231
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9988785437155222e-05,
+ "loss": 0.9449,
+ "step": 232
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9988488432507963e-05,
+ "loss": 1.0345,
+ "step": 233
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9988187548446895e-05,
+ "loss": 0.965,
+ "step": 234
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998788278508888e-05,
+ "loss": 0.9971,
+ "step": 235
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987574142552274e-05,
+ "loss": 0.9898,
+ "step": 236
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987261620956964e-05,
+ "loss": 0.9984,
+ "step": 237
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9986945220424326e-05,
+ "loss": 1.0083,
+ "step": 238
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998662494107724e-05,
+ "loss": 0.9371,
+ "step": 239
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.99863007830401e-05,
+ "loss": 1.024,
+ "step": 240
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985972746438815e-05,
+ "loss": 1.0131,
+ "step": 241
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985640831400778e-05,
+ "loss": 0.923,
+ "step": 242
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998530503805491e-05,
+ "loss": 0.9833,
+ "step": 243
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984965366531624e-05,
+ "loss": 0.981,
+ "step": 244
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984621816962843e-05,
+ "loss": 0.9922,
+ "step": 245
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984274389482005e-05,
+ "loss": 1.037,
+ "step": 246
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983923084224047e-05,
+ "loss": 0.9879,
+ "step": 247
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983567901325404e-05,
+ "loss": 0.9919,
+ "step": 248
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983208840924028e-05,
+ "loss": 0.9303,
+ "step": 249
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998284590315937e-05,
+ "loss": 0.9406,
+ "step": 250
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982479088172403e-05,
+ "loss": 0.3251,
+ "step": 251
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982108396105584e-05,
+ "loss": 0.9975,
+ "step": 252
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9981733827102884e-05,
+ "loss": 0.9497,
+ "step": 253
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998135538130979e-05,
+ "loss": 0.9562,
+ "step": 254
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998097305887328e-05,
+ "loss": 1.0052,
+ "step": 255
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9980586859941846e-05,
+ "loss": 0.9342,
+ "step": 256
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998019678466548e-05,
+ "loss": 0.9237,
+ "step": 257
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997980283319568e-05,
+ "loss": 0.9744,
+ "step": 258
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979405005685466e-05,
+ "loss": 1.0382,
+ "step": 259
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979003302289336e-05,
+ "loss": 0.9797,
+ "step": 260
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997859772316331e-05,
+ "loss": 0.9955,
+ "step": 261
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9978188268464912e-05,
+ "loss": 0.8648,
+ "step": 262
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997777493835317e-05,
+ "loss": 0.9995,
+ "step": 263
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9977357732988616e-05,
+ "loss": 0.9618,
+ "step": 264
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976936652533288e-05,
+ "loss": 0.9682,
+ "step": 265
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997651169715073e-05,
+ "loss": 0.9777,
+ "step": 266
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976082867005985e-05,
+ "loss": 0.9652,
+ "step": 267
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997565016226561e-05,
+ "loss": 1.0588,
+ "step": 268
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997521358309766e-05,
+ "loss": 0.8892,
+ "step": 269
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.99747731296717e-05,
+ "loss": 0.9918,
+ "step": 270
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9974328802158798e-05,
+ "loss": 0.9888,
+ "step": 271
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997388060073152e-05,
+ "loss": 1.022,
+ "step": 272
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9973428525563948e-05,
+ "loss": 0.9987,
+ "step": 273
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972972576831656e-05,
+ "loss": 0.9734,
+ "step": 274
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972512754711738e-05,
+ "loss": 1.049,
+ "step": 275
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997204905938278e-05,
+ "loss": 0.9298,
+ "step": 276
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9971581491024873e-05,
+ "loss": 1.0159,
+ "step": 277
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997111004981962e-05,
+ "loss": 0.9998,
+ "step": 278
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9970634735950117e-05,
+ "loss": 1.0013,
+ "step": 279
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9970155549600978e-05,
+ "loss": 0.9775,
+ "step": 280
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9969672490958304e-05,
+ "loss": 0.9639,
+ "step": 281
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.996918556020972e-05,
+ "loss": 0.9604,
+ "step": 282
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.996869475754434e-05,
+ "loss": 0.9464,
+ "step": 283
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9968200083152784e-05,
+ "loss": 0.9963,
+ "step": 284
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9967701537227175e-05,
+ "loss": 0.9389,
+ "step": 285
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996719911996115e-05,
+ "loss": 0.9534,
+ "step": 286
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996669283154984e-05,
+ "loss": 0.8961,
+ "step": 287
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996618267218988e-05,
+ "loss": 0.9537,
+ "step": 288
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996566864207941e-05,
+ "loss": 0.9773,
+ "step": 289
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9965150741418072e-05,
+ "loss": 0.9985,
+ "step": 290
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9964628970407018e-05,
+ "loss": 1.0109,
+ "step": 291
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9964103329248892e-05,
+ "loss": 0.9856,
+ "step": 292
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996357381814785e-05,
+ "loss": 0.9531,
+ "step": 293
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996304043730955e-05,
+ "loss": 1.0416,
+ "step": 294
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9962503186941143e-05,
+ "loss": 0.902,
+ "step": 295
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9961962067251298e-05,
+ "loss": 1.0041,
+ "step": 296
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9961417078450177e-05,
+ "loss": 0.3216,
+ "step": 297
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996086822074945e-05,
+ "loss": 0.9695,
+ "step": 298
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9960315494362286e-05,
+ "loss": 1.0055,
+ "step": 299
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9959758899503355e-05,
+ "loss": 0.9274,
+ "step": 300
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995919843638883e-05,
+ "loss": 1.0085,
+ "step": 301
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9958634105236395e-05,
+ "loss": 1.024,
+ "step": 302
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9958065906265228e-05,
+ "loss": 0.9575,
+ "step": 303
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9957493839696013e-05,
+ "loss": 0.931,
+ "step": 304
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9956917905750926e-05,
+ "loss": 1.014,
+ "step": 305
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995633810465366e-05,
+ "loss": 0.9083,
+ "step": 306
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.99557544366294e-05,
+ "loss": 1.0107,
+ "step": 307
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9955166901904838e-05,
+ "loss": 0.9126,
+ "step": 308
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9954575500708164e-05,
+ "loss": 0.9656,
+ "step": 309
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995398023326907e-05,
+ "loss": 0.95,
+ "step": 310
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9953381099818756e-05,
+ "loss": 0.9424,
+ "step": 311
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9952778100589912e-05,
+ "loss": 0.8988,
+ "step": 312
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9952171235816747e-05,
+ "loss": 1.0329,
+ "step": 313
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9951560505734948e-05,
+ "loss": 1.0457,
+ "step": 314
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9950945910581718e-05,
+ "loss": 0.8971,
+ "step": 315
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9950327450595766e-05,
+ "loss": 0.9726,
+ "step": 316
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949705126017286e-05,
+ "loss": 0.9883,
+ "step": 317
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949078937087988e-05,
+ "loss": 0.987,
+ "step": 318
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994844888405107e-05,
+ "loss": 0.9479,
+ "step": 319
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9947814967151246e-05,
+ "loss": 0.9239,
+ "step": 320
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9947177186634716e-05,
+ "loss": 0.9383,
+ "step": 321
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9946535542749187e-05,
+ "loss": 0.2709,
+ "step": 322
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9945890035743866e-05,
+ "loss": 1.053,
+ "step": 323
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9945240665869465e-05,
+ "loss": 0.9982,
+ "step": 324
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9944587433378187e-05,
+ "loss": 1.0055,
+ "step": 325
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994393033852374e-05,
+ "loss": 0.9182,
+ "step": 326
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9943269381561334e-05,
+ "loss": 1.0582,
+ "step": 327
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994260456274768e-05,
+ "loss": 0.9732,
+ "step": 328
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9941935882340976e-05,
+ "loss": 0.9413,
+ "step": 329
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994126334060094e-05,
+ "loss": 0.9014,
+ "step": 330
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994058693778878e-05,
+ "loss": 0.9939,
+ "step": 331
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9939906674167192e-05,
+ "loss": 0.9712,
+ "step": 332
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993922255000039e-05,
+ "loss": 0.9642,
+ "step": 333
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993853456555408e-05,
+ "loss": 0.9423,
+ "step": 334
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9937842721095468e-05,
+ "loss": 1.0095,
+ "step": 335
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9937147016893257e-05,
+ "loss": 0.9853,
+ "step": 336
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9936447453217646e-05,
+ "loss": 0.9414,
+ "step": 337
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9935744030340347e-05,
+ "loss": 0.9975,
+ "step": 338
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9935036748534555e-05,
+ "loss": 1.0131,
+ "step": 339
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993432560807497e-05,
+ "loss": 0.9472,
+ "step": 340
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993361060923779e-05,
+ "loss": 0.9754,
+ "step": 341
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9932891752300717e-05,
+ "loss": 0.9018,
+ "step": 342
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9932169037542947e-05,
+ "loss": 0.9971,
+ "step": 343
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9931442465245164e-05,
+ "loss": 0.9472,
+ "step": 344
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9930712035689576e-05,
+ "loss": 0.9566,
+ "step": 345
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992997774915986e-05,
+ "loss": 0.9609,
+ "step": 346
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992923960594121e-05,
+ "loss": 1.0305,
+ "step": 347
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9928497606320308e-05,
+ "loss": 0.9794,
+ "step": 348
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992775175058535e-05,
+ "loss": 0.9911,
+ "step": 349
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9927002039026002e-05,
+ "loss": 0.8958,
+ "step": 350
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9926248471933453e-05,
+ "loss": 0.998,
+ "step": 351
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9925491049600382e-05,
+ "loss": 0.3126,
+ "step": 352
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9924729772320953e-05,
+ "loss": 0.9474,
+ "step": 353
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9923964640390846e-05,
+ "loss": 1.0056,
+ "step": 354
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9923195654107227e-05,
+ "loss": 0.952,
+ "step": 355
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992242281376876e-05,
+ "loss": 0.9329,
+ "step": 356
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9921646119675606e-05,
+ "loss": 0.9567,
+ "step": 357
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9920865572129426e-05,
+ "loss": 1.0107,
+ "step": 358
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9920081171433377e-05,
+ "loss": 1.0179,
+ "step": 359
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991929291789211e-05,
+ "loss": 1.0085,
+ "step": 360
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9918500811811778e-05,
+ "loss": 0.9612,
+ "step": 361
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991770485350002e-05,
+ "loss": 0.9569,
+ "step": 362
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991690504326597e-05,
+ "loss": 0.9991,
+ "step": 363
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9916101381420285e-05,
+ "loss": 0.9678,
+ "step": 364
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9915293868275083e-05,
+ "loss": 0.9505,
+ "step": 365
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9914482504143996e-05,
+ "loss": 0.9855,
+ "step": 366
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9913667289342147e-05,
+ "loss": 0.9686,
+ "step": 367
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991284822418616e-05,
+ "loss": 0.9202,
+ "step": 368
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9912025308994146e-05,
+ "loss": 0.9758,
+ "step": 369
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9911198544085723e-05,
+ "loss": 1.0149,
+ "step": 370
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991036792978199e-05,
+ "loss": 1.0112,
+ "step": 371
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990953346640555e-05,
+ "loss": 0.9875,
+ "step": 372
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9908695154280496e-05,
+ "loss": 0.9437,
+ "step": 373
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9907852993732425e-05,
+ "loss": 0.9477,
+ "step": 374
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990700698508842e-05,
+ "loss": 0.9369,
+ "step": 375
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990615712867706e-05,
+ "loss": 0.9131,
+ "step": 376
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9905303424828418e-05,
+ "loss": 1.022,
+ "step": 377
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9904445873874068e-05,
+ "loss": 0.9213,
+ "step": 378
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9903584476147066e-05,
+ "loss": 0.9923,
+ "step": 379
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9902719231981975e-05,
+ "loss": 0.9728,
+ "step": 380
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9901850141714843e-05,
+ "loss": 0.9763,
+ "step": 381
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900977205683213e-05,
+ "loss": 0.9372,
+ "step": 382
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900100424226124e-05,
+ "loss": 0.9181,
+ "step": 383
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9899219797684113e-05,
+ "loss": 0.9654,
+ "step": 384
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.98983353263992e-05,
+ "loss": 0.9802,
+ "step": 385
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9897447010714905e-05,
+ "loss": 0.9552,
+ "step": 386
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.989655485097624e-05,
+ "loss": 0.9995,
+ "step": 387
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.989565884752971e-05,
+ "loss": 0.9242,
+ "step": 388
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9894759000723308e-05,
+ "loss": 1.0011,
+ "step": 389
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9893855310906526e-05,
+ "loss": 0.9923,
+ "step": 390
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9892947778430352e-05,
+ "loss": 0.9929,
+ "step": 391
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9892036403647256e-05,
+ "loss": 1.0172,
+ "step": 392
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9891121186911207e-05,
+ "loss": 0.9918,
+ "step": 393
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9890202128577664e-05,
+ "loss": 0.9967,
+ "step": 394
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988927922900358e-05,
+ "loss": 0.9102,
+ "step": 395
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9888352488547397e-05,
+ "loss": 0.9903,
+ "step": 396
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988742190756905e-05,
+ "loss": 0.9354,
+ "step": 397
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9886487486429966e-05,
+ "loss": 1.0028,
+ "step": 398
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9885549225493064e-05,
+ "loss": 0.9658,
+ "step": 399
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9884607125122753e-05,
+ "loss": 0.94,
+ "step": 400
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988366118568494e-05,
+ "loss": 0.9509,
+ "step": 401
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988271140754701e-05,
+ "loss": 0.9642,
+ "step": 402
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9881757791077848e-05,
+ "loss": 0.9849,
+ "step": 403
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9880800336647825e-05,
+ "loss": 0.9676,
+ "step": 404
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987983904462881e-05,
+ "loss": 0.9746,
+ "step": 405
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9878873915394154e-05,
+ "loss": 1.0209,
+ "step": 406
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9877904949318704e-05,
+ "loss": 0.9741,
+ "step": 407
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9876932146778796e-05,
+ "loss": 0.9492,
+ "step": 408
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9875955508152254e-05,
+ "loss": 0.2791,
+ "step": 409
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987497503381839e-05,
+ "loss": 0.2944,
+ "step": 410
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9873990724158014e-05,
+ "loss": 0.9199,
+ "step": 411
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987300257955342e-05,
+ "loss": 0.96,
+ "step": 412
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987201060038839e-05,
+ "loss": 0.9848,
+ "step": 413
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9871014787048197e-05,
+ "loss": 0.985,
+ "step": 414
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9870015139919606e-05,
+ "loss": 0.9003,
+ "step": 415
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9869011659390866e-05,
+ "loss": 1.0227,
+ "step": 416
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9868004345851716e-05,
+ "loss": 0.9831,
+ "step": 417
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9866993199693393e-05,
+ "loss": 0.9358,
+ "step": 418
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.98659782213086e-05,
+ "loss": 0.9757,
+ "step": 419
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986495941109156e-05,
+ "loss": 1.0239,
+ "step": 420
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9863936769437956e-05,
+ "loss": 0.9802,
+ "step": 421
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986291029674497e-05,
+ "loss": 0.9725,
+ "step": 422
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986187999341128e-05,
+ "loss": 1.008,
+ "step": 423
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9860845859837034e-05,
+ "loss": 0.9516,
+ "step": 424
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985980789642388e-05,
+ "loss": 0.8976,
+ "step": 425
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985876610357496e-05,
+ "loss": 0.9699,
+ "step": 426
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9857720481694887e-05,
+ "loss": 0.9561,
+ "step": 427
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9856671031189765e-05,
+ "loss": 0.9651,
+ "step": 428
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.98556177524672e-05,
+ "loss": 0.9456,
+ "step": 429
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9854560645936262e-05,
+ "loss": 1.0112,
+ "step": 430
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9853499712007523e-05,
+ "loss": 0.9635,
+ "step": 431
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9852434951093035e-05,
+ "loss": 0.8902,
+ "step": 432
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985136636360635e-05,
+ "loss": 0.9033,
+ "step": 433
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985029394996248e-05,
+ "loss": 0.2969,
+ "step": 434
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9849217710577945e-05,
+ "loss": 0.3144,
+ "step": 435
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9848137645870745e-05,
+ "loss": 0.9346,
+ "step": 436
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9847053756260363e-05,
+ "loss": 0.9726,
+ "step": 437
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.984596604216777e-05,
+ "loss": 0.9481,
+ "step": 438
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.984487450401542e-05,
+ "loss": 0.9606,
+ "step": 439
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9843779142227258e-05,
+ "loss": 0.8909,
+ "step": 440
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9842679957228706e-05,
+ "loss": 1.0256,
+ "step": 441
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9841576949446675e-05,
+ "loss": 0.9613,
+ "step": 442
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.984047011930956e-05,
+ "loss": 0.9351,
+ "step": 443
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9839359467247243e-05,
+ "loss": 0.9766,
+ "step": 444
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.983824499369109e-05,
+ "loss": 1.0027,
+ "step": 445
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9837126699073948e-05,
+ "loss": 0.9637,
+ "step": 446
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9836004583830146e-05,
+ "loss": 0.9507,
+ "step": 447
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9834878648395507e-05,
+ "loss": 0.9815,
+ "step": 448
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9833748893207326e-05,
+ "loss": 0.9587,
+ "step": 449
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9832615318704388e-05,
+ "loss": 0.8957,
+ "step": 450
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9831477925326962e-05,
+ "loss": 1.0338,
+ "step": 451
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.98303367135168e-05,
+ "loss": 0.9901,
+ "step": 452
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9829191683717133e-05,
+ "loss": 0.9134,
+ "step": 453
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9828042836372677e-05,
+ "loss": 0.9366,
+ "step": 454
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9826890171929634e-05,
+ "loss": 0.9063,
+ "step": 455
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.982573369083568e-05,
+ "loss": 0.936,
+ "step": 456
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9824573393539984e-05,
+ "loss": 0.9738,
+ "step": 457
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.982340928049319e-05,
+ "loss": 0.9805,
+ "step": 458
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9822241352147426e-05,
+ "loss": 0.9481,
+ "step": 459
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9821069608956307e-05,
+ "loss": 0.9224,
+ "step": 460
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9819894051374917e-05,
+ "loss": 0.9435,
+ "step": 461
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981871467985983e-05,
+ "loss": 0.9558,
+ "step": 462
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9817531494869105e-05,
+ "loss": 0.9753,
+ "step": 463
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9816344496862272e-05,
+ "loss": 0.9506,
+ "step": 464
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9815153686300352e-05,
+ "loss": 0.9922,
+ "step": 465
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981395906364584e-05,
+ "loss": 1.007,
+ "step": 466
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9812760629362714e-05,
+ "loss": 0.9239,
+ "step": 467
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981155838391643e-05,
+ "loss": 0.8999,
+ "step": 468
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9810352327773935e-05,
+ "loss": 0.8966,
+ "step": 469
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9809142461403635e-05,
+ "loss": 0.8609,
+ "step": 470
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9807928785275433e-05,
+ "loss": 1.0235,
+ "step": 471
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.980671129986071e-05,
+ "loss": 0.9631,
+ "step": 472
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9805490005632323e-05,
+ "loss": 1.0053,
+ "step": 473
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.98042649030646e-05,
+ "loss": 0.9878,
+ "step": 474
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9803035992633366e-05,
+ "loss": 1.0371,
+ "step": 475
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9801803274815915e-05,
+ "loss": 1.0088,
+ "step": 476
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9800566750091018e-05,
+ "loss": 0.9889,
+ "step": 477
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9799326418938924e-05,
+ "loss": 0.9152,
+ "step": 478
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979808228184137e-05,
+ "loss": 0.9684,
+ "step": 479
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9796834339281557e-05,
+ "loss": 0.9171,
+ "step": 480
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979558259174418e-05,
+ "loss": 0.9473,
+ "step": 481
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9794327039715395e-05,
+ "loss": 0.9039,
+ "step": 482
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979306768368285e-05,
+ "loss": 0.9673,
+ "step": 483
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9791804524135663e-05,
+ "loss": 0.9844,
+ "step": 484
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979053756156443e-05,
+ "loss": 0.9177,
+ "step": 485
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9789266796461222e-05,
+ "loss": 0.9703,
+ "step": 486
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9787992229319594e-05,
+ "loss": 0.9167,
+ "step": 487
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.978671386063457e-05,
+ "loss": 0.9837,
+ "step": 488
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.978543169090265e-05,
+ "loss": 0.3561,
+ "step": 489
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9784145720621827e-05,
+ "loss": 0.9968,
+ "step": 490
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9782855950291542e-05,
+ "loss": 0.963,
+ "step": 491
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.978156238041274e-05,
+ "loss": 0.9685,
+ "step": 492
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9780265011487822e-05,
+ "loss": 0.9381,
+ "step": 493
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9778963844020668e-05,
+ "loss": 0.9407,
+ "step": 494
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977765887851664e-05,
+ "loss": 0.9165,
+ "step": 495
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977635011548257e-05,
+ "loss": 0.3316,
+ "step": 496
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9775037555426772e-05,
+ "loss": 0.9159,
+ "step": 497
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9773721198859024e-05,
+ "loss": 0.9379,
+ "step": 498
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9772401046290584e-05,
+ "loss": 0.9768,
+ "step": 499
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9771077098234187e-05,
+ "loss": 0.9927,
+ "step": 500
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9769749355204034e-05,
+ "loss": 0.9546,
+ "step": 501
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976841781771581e-05,
+ "loss": 0.9958,
+ "step": 502
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9767082486286667e-05,
+ "loss": 0.9466,
+ "step": 503
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9765743361435234e-05,
+ "loss": 0.9159,
+ "step": 504
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9764400443681607e-05,
+ "loss": 0.8767,
+ "step": 505
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9763053733547367e-05,
+ "loss": 1.0446,
+ "step": 506
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976170323155555e-05,
+ "loss": 0.9448,
+ "step": 507
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976034893823069e-05,
+ "loss": 0.9132,
+ "step": 508
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975899085409876e-05,
+ "loss": 0.9193,
+ "step": 509
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9757628979687247e-05,
+ "loss": 0.9665,
+ "step": 510
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975626331552507e-05,
+ "loss": 0.9742,
+ "step": 511
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9754893862142643e-05,
+ "loss": 0.294,
+ "step": 512
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9753520620071846e-05,
+ "loss": 1.0007,
+ "step": 513
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9752143589846027e-05,
+ "loss": 0.9719,
+ "step": 514
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9750762772000014e-05,
+ "loss": 0.9857,
+ "step": 515
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9749378167070097e-05,
+ "loss": 0.9214,
+ "step": 516
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9747989775594044e-05,
+ "loss": 0.9272,
+ "step": 517
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974659759811109e-05,
+ "loss": 0.9527,
+ "step": 518
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9745201635161938e-05,
+ "loss": 0.9597,
+ "step": 519
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9743801887288762e-05,
+ "loss": 0.9602,
+ "step": 520
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9742398355035212e-05,
+ "loss": 0.9721,
+ "step": 521
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9740991038946404e-05,
+ "loss": 0.3203,
+ "step": 522
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.973957993956892e-05,
+ "loss": 0.9899,
+ "step": 523
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9738165057450817e-05,
+ "loss": 0.9541,
+ "step": 524
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9736746393141617e-05,
+ "loss": 0.9986,
+ "step": 525
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9735323947192317e-05,
+ "loss": 1.0268,
+ "step": 526
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9733897720155377e-05,
+ "loss": 0.9229,
+ "step": 527
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9732467712584723e-05,
+ "loss": 0.9152,
+ "step": 528
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.973103392503576e-05,
+ "loss": 1.0084,
+ "step": 529
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9729596358065347e-05,
+ "loss": 0.9658,
+ "step": 530
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9728155012231825e-05,
+ "loss": 0.9228,
+ "step": 531
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9726709888094994e-05,
+ "loss": 0.9909,
+ "step": 532
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.972526098621612e-05,
+ "loss": 0.955,
+ "step": 533
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.972380830715795e-05,
+ "loss": 0.9968,
+ "step": 534
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9722351851484677e-05,
+ "loss": 0.9466,
+ "step": 535
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9720891619761974e-05,
+ "loss": 0.9519,
+ "step": 536
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9719427612556982e-05,
+ "loss": 1.0199,
+ "step": 537
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9717959830438302e-05,
+ "loss": 0.9054,
+ "step": 538
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9716488273976006e-05,
+ "loss": 0.9618,
+ "step": 539
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971501294374162e-05,
+ "loss": 0.9405,
+ "step": 540
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971353384030816e-05,
+ "loss": 0.9531,
+ "step": 541
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9712050964250083e-05,
+ "loss": 0.9163,
+ "step": 542
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9710564316143323e-05,
+ "loss": 0.9231,
+ "step": 543
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9709073896565276e-05,
+ "loss": 0.9702,
+ "step": 544
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9707579706094807e-05,
+ "loss": 0.9434,
+ "step": 545
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.970608174531224e-05,
+ "loss": 0.9116,
+ "step": 546
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.970458001479937e-05,
+ "loss": 0.9492,
+ "step": 547
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9703074515139445e-05,
+ "loss": 0.951,
+ "step": 548
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9701565246917184e-05,
+ "loss": 0.968,
+ "step": 549
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9700052210718775e-05,
+ "loss": 0.9962,
+ "step": 550
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.969853540713186e-05,
+ "loss": 1.0122,
+ "step": 551
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9697014836745552e-05,
+ "loss": 0.9703,
+ "step": 552
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9695490500150418e-05,
+ "loss": 0.9328,
+ "step": 553
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9693962397938495e-05,
+ "loss": 0.97,
+ "step": 554
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9692430530703282e-05,
+ "loss": 0.9872,
+ "step": 555
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9690894899039735e-05,
+ "loss": 1.015,
+ "step": 556
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9689355503544277e-05,
+ "loss": 0.903,
+ "step": 557
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968781234481479e-05,
+ "loss": 0.9144,
+ "step": 558
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9686265423450624e-05,
+ "loss": 0.9404,
+ "step": 559
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9684714740052584e-05,
+ "loss": 0.9781,
+ "step": 560
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9683160295222934e-05,
+ "loss": 0.9543,
+ "step": 561
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9681602089565403e-05,
+ "loss": 0.9393,
+ "step": 562
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968004012368518e-05,
+ "loss": 0.9467,
+ "step": 563
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967847439818892e-05,
+ "loss": 0.8951,
+ "step": 564
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9676904913684725e-05,
+ "loss": 0.9328,
+ "step": 565
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967533167078217e-05,
+ "loss": 0.9344,
+ "step": 566
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9673754670092283e-05,
+ "loss": 0.979,
+ "step": 567
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9672173912227556e-05,
+ "loss": 0.9351,
+ "step": 568
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967058939780193e-05,
+ "loss": 0.9756,
+ "step": 569
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.966900112743082e-05,
+ "loss": 0.886,
+ "step": 570
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.966740910173108e-05,
+ "loss": 0.9123,
+ "step": 571
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9665813321321054e-05,
+ "loss": 0.958,
+ "step": 572
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9664213786820502e-05,
+ "loss": 0.956,
+ "step": 573
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9662610498850684e-05,
+ "loss": 0.928,
+ "step": 574
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9661003458034288e-05,
+ "loss": 0.9454,
+ "step": 575
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965939266499547e-05,
+ "loss": 0.3032,
+ "step": 576
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9657778120359848e-05,
+ "loss": 0.356,
+ "step": 577
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965615982475449e-05,
+ "loss": 0.9396,
+ "step": 578
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9654537778807924e-05,
+ "loss": 0.9366,
+ "step": 579
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9652911983150135e-05,
+ "loss": 0.9995,
+ "step": 580
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965128243841256e-05,
+ "loss": 0.9299,
+ "step": 581
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.96496491452281e-05,
+ "loss": 1.0017,
+ "step": 582
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9648012104231106e-05,
+ "loss": 0.9755,
+ "step": 583
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964637131605738e-05,
+ "loss": 0.9781,
+ "step": 584
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9644726781344197e-05,
+ "loss": 0.9331,
+ "step": 585
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964307850073026e-05,
+ "loss": 0.9641,
+ "step": 586
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964142647485576e-05,
+ "loss": 0.948,
+ "step": 587
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9639770704362305e-05,
+ "loss": 0.9493,
+ "step": 588
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9638111189892994e-05,
+ "loss": 0.9352,
+ "step": 589
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9636447932092354e-05,
+ "loss": 0.9591,
+ "step": 590
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.963478093160638e-05,
+ "loss": 0.359,
+ "step": 591
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9633110189082515e-05,
+ "loss": 0.94,
+ "step": 592
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.963143570516965e-05,
+ "loss": 0.9336,
+ "step": 593
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9629757480518144e-05,
+ "loss": 0.9057,
+ "step": 594
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9628075515779796e-05,
+ "loss": 0.9284,
+ "step": 595
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.962638981160786e-05,
+ "loss": 0.9744,
+ "step": 596
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9624700368657045e-05,
+ "loss": 0.9535,
+ "step": 597
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9623007187583518e-05,
+ "loss": 0.9348,
+ "step": 598
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.962131026904488e-05,
+ "loss": 0.9052,
+ "step": 599
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.96196096137002e-05,
+ "loss": 0.981,
+ "step": 600
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9617905222209998e-05,
+ "loss": 0.9034,
+ "step": 601
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961619709523623e-05,
+ "loss": 0.9294,
+ "step": 602
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9614485233442316e-05,
+ "loss": 0.9432,
+ "step": 603
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961276963749313e-05,
+ "loss": 0.9437,
+ "step": 604
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9611050308054982e-05,
+ "loss": 0.9222,
+ "step": 605
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9609327245795642e-05,
+ "loss": 0.9645,
+ "step": 606
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9607600451384327e-05,
+ "loss": 0.9591,
+ "step": 607
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.960586992549171e-05,
+ "loss": 0.3233,
+ "step": 608
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9604135668789897e-05,
+ "loss": 0.3091,
+ "step": 609
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9602397681952462e-05,
+ "loss": 0.9029,
+ "step": 610
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9600655965654413e-05,
+ "loss": 0.9137,
+ "step": 611
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959891052057222e-05,
+ "loss": 0.9258,
+ "step": 612
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9597161347383783e-05,
+ "loss": 1.0029,
+ "step": 613
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959540844676847e-05,
+ "loss": 0.9326,
+ "step": 614
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9593651819407084e-05,
+ "loss": 0.9146,
+ "step": 615
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959189146598188e-05,
+ "loss": 0.9942,
+ "step": 616
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9590127387176556e-05,
+ "loss": 0.9462,
+ "step": 617
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9588359583676263e-05,
+ "loss": 0.9417,
+ "step": 618
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9586588056167595e-05,
+ "loss": 0.9543,
+ "step": 619
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958481280533859e-05,
+ "loss": 0.9091,
+ "step": 620
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958303383187874e-05,
+ "loss": 0.9614,
+ "step": 621
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9581251136478974e-05,
+ "loss": 0.966,
+ "step": 622
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9579464719831668e-05,
+ "loss": 1.0124,
+ "step": 623
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9577674582630653e-05,
+ "loss": 0.9958,
+ "step": 624
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957588072557119e-05,
+ "loss": 0.9447,
+ "step": 625
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957408314935e-05,
+ "loss": 0.8778,
+ "step": 626
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9572281854665233e-05,
+ "loss": 0.9647,
+ "step": 627
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95704768422165e-05,
+ "loss": 0.9164,
+ "step": 628
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956866811270484e-05,
+ "loss": 0.9681,
+ "step": 629
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9566855666832743e-05,
+ "loss": 0.9696,
+ "step": 630
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9565039505304145e-05,
+ "loss": 0.9038,
+ "step": 631
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956321962882442e-05,
+ "loss": 0.9858,
+ "step": 632
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956139603810039e-05,
+ "loss": 0.9405,
+ "step": 633
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9559568733840317e-05,
+ "loss": 0.9368,
+ "step": 634
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9557737716753898e-05,
+ "loss": 0.9261,
+ "step": 635
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9555902987552283e-05,
+ "loss": 0.952,
+ "step": 636
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9554064546948064e-05,
+ "loss": 0.9369,
+ "step": 637
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9552222395655262e-05,
+ "loss": 0.8745,
+ "step": 638
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9550376534389355e-05,
+ "loss": 0.9598,
+ "step": 639
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9548526963867253e-05,
+ "loss": 0.985,
+ "step": 640
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9546673684807303e-05,
+ "loss": 0.9148,
+ "step": 641
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95448166979293e-05,
+ "loss": 0.9259,
+ "step": 642
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9542956003954477e-05,
+ "loss": 0.9543,
+ "step": 643
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9541091603605508e-05,
+ "loss": 0.8677,
+ "step": 644
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95392234976065e-05,
+ "loss": 0.9009,
+ "step": 645
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9537351686683003e-05,
+ "loss": 0.9323,
+ "step": 646
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9535476171562014e-05,
+ "loss": 0.9528,
+ "step": 647
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9533596952971955e-05,
+ "loss": 0.9111,
+ "step": 648
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9531714031642698e-05,
+ "loss": 0.8794,
+ "step": 649
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9529827408305542e-05,
+ "loss": 0.9228,
+ "step": 650
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9527937083693233e-05,
+ "loss": 0.978,
+ "step": 651
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.952604305853995e-05,
+ "loss": 0.9506,
+ "step": 652
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9524145333581315e-05,
+ "loss": 0.994,
+ "step": 653
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9522243909554375e-05,
+ "loss": 0.969,
+ "step": 654
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.952033878719763e-05,
+ "loss": 1.0084,
+ "step": 655
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9518429967251e-05,
+ "loss": 0.3728,
+ "step": 656
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9516517450455853e-05,
+ "loss": 0.9258,
+ "step": 657
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.951460123755499e-05,
+ "loss": 0.8782,
+ "step": 658
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9512681329292635e-05,
+ "loss": 0.9232,
+ "step": 659
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.951075772641447e-05,
+ "loss": 0.9109,
+ "step": 660
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.95088304296676e-05,
+ "loss": 0.9142,
+ "step": 661
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.950689943980056e-05,
+ "loss": 0.9372,
+ "step": 662
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9504964757563322e-05,
+ "loss": 0.9459,
+ "step": 663
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.95030263837073e-05,
+ "loss": 0.9116,
+ "step": 664
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9501084318985335e-05,
+ "loss": 0.9795,
+ "step": 665
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.94991385641517e-05,
+ "loss": 0.9757,
+ "step": 666
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9497189119962105e-05,
+ "loss": 0.987,
+ "step": 667
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9495235987173693e-05,
+ "loss": 0.8944,
+ "step": 668
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.949327916654504e-05,
+ "loss": 0.985,
+ "step": 669
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.949131865883614e-05,
+ "loss": 0.8853,
+ "step": 670
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948935446480845e-05,
+ "loss": 0.91,
+ "step": 671
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948738658522483e-05,
+ "loss": 0.9634,
+ "step": 672
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9485415020849583e-05,
+ "loss": 0.358,
+ "step": 673
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9483439772448444e-05,
+ "loss": 0.8701,
+ "step": 674
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9481460840788573e-05,
+ "loss": 0.8917,
+ "step": 675
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9479478226638565e-05,
+ "loss": 0.3685,
+ "step": 676
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.947749193076845e-05,
+ "loss": 0.9397,
+ "step": 677
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9475501953949674e-05,
+ "loss": 0.9149,
+ "step": 678
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9473508296955126e-05,
+ "loss": 0.3384,
+ "step": 679
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9471510960559122e-05,
+ "loss": 0.9735,
+ "step": 680
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9469509945537395e-05,
+ "loss": 1.0004,
+ "step": 681
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9467505252667126e-05,
+ "loss": 0.9774,
+ "step": 682
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9465496882726913e-05,
+ "loss": 0.9733,
+ "step": 683
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.946348483649678e-05,
+ "loss": 0.9641,
+ "step": 684
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9461469114758184e-05,
+ "loss": 0.9397,
+ "step": 685
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9459449718294008e-05,
+ "loss": 0.9411,
+ "step": 686
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.945742664788856e-05,
+ "loss": 0.9532,
+ "step": 687
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9455399904327585e-05,
+ "loss": 0.9781,
+ "step": 688
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.945336948839824e-05,
+ "loss": 0.9334,
+ "step": 689
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9451335400889114e-05,
+ "loss": 0.9683,
+ "step": 690
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.944929764259023e-05,
+ "loss": 0.3255,
+ "step": 691
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9447256214293026e-05,
+ "loss": 0.9136,
+ "step": 692
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9445211116790365e-05,
+ "loss": 0.9113,
+ "step": 693
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9443162350876544e-05,
+ "loss": 0.9494,
+ "step": 694
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.944110991734728e-05,
+ "loss": 0.8912,
+ "step": 695
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9439053816999717e-05,
+ "loss": 0.9126,
+ "step": 696
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9436994050632415e-05,
+ "loss": 0.932,
+ "step": 697
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9434930619045367e-05,
+ "loss": 0.9146,
+ "step": 698
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9432863523039986e-05,
+ "loss": 0.9433,
+ "step": 699
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9430792763419105e-05,
+ "loss": 0.949,
+ "step": 700
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.942871834098699e-05,
+ "loss": 0.9449,
+ "step": 701
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9426640256549313e-05,
+ "loss": 0.3057,
+ "step": 702
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9424558510913186e-05,
+ "loss": 0.968,
+ "step": 703
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9422473104887133e-05,
+ "loss": 0.9604,
+ "step": 704
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9420384039281103e-05,
+ "loss": 0.9836,
+ "step": 705
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.941829131490646e-05,
+ "loss": 0.9014,
+ "step": 706
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9416194932576e-05,
+ "loss": 0.9281,
+ "step": 707
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.941409489310393e-05,
+ "loss": 0.8974,
+ "step": 708
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9411991197305878e-05,
+ "loss": 0.9138,
+ "step": 709
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9409883845998905e-05,
+ "loss": 0.955,
+ "step": 710
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9407772840001473e-05,
+ "loss": 0.9367,
+ "step": 711
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9405658180133477e-05,
+ "loss": 0.9613,
+ "step": 712
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9403539867216226e-05,
+ "loss": 0.9882,
+ "step": 713
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9401417902072447e-05,
+ "loss": 0.9232,
+ "step": 714
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9399292285526286e-05,
+ "loss": 1.0081,
+ "step": 715
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939716301840331e-05,
+ "loss": 0.9325,
+ "step": 716
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9395030101530504e-05,
+ "loss": 0.969,
+ "step": 717
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939289353573626e-05,
+ "loss": 0.9948,
+ "step": 718
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9390753321850404e-05,
+ "loss": 0.9315,
+ "step": 719
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.938860946070417e-05,
+ "loss": 0.9175,
+ "step": 720
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.93864619531302e-05,
+ "loss": 0.9635,
+ "step": 721
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9384310799962575e-05,
+ "loss": 0.9699,
+ "step": 722
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9382156002036764e-05,
+ "loss": 0.8872,
+ "step": 723
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9379997560189677e-05,
+ "loss": 0.9445,
+ "step": 724
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937783547525962e-05,
+ "loss": 0.9454,
+ "step": 725
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9375669748086326e-05,
+ "loss": 0.9394,
+ "step": 726
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937350037951094e-05,
+ "loss": 0.9521,
+ "step": 727
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9371327370376018e-05,
+ "loss": 0.9497,
+ "step": 728
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936915072152553e-05,
+ "loss": 0.912,
+ "step": 729
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936697043380486e-05,
+ "loss": 0.3029,
+ "step": 730
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936478650806081e-05,
+ "loss": 0.9439,
+ "step": 731
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936259894514159e-05,
+ "loss": 0.9478,
+ "step": 732
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9360407745896828e-05,
+ "loss": 0.9617,
+ "step": 733
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9358212911177556e-05,
+ "loss": 0.8989,
+ "step": 734
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935601444183622e-05,
+ "loss": 0.9565,
+ "step": 735
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935381233872669e-05,
+ "loss": 0.8481,
+ "step": 736
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935160660270423e-05,
+ "loss": 0.9559,
+ "step": 737
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934939723462552e-05,
+ "loss": 0.907,
+ "step": 738
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9347184235348663e-05,
+ "loss": 0.9452,
+ "step": 739
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9344967605733154e-05,
+ "loss": 0.8931,
+ "step": 740
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934274734663991e-05,
+ "loss": 0.9234,
+ "step": 741
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934052345893125e-05,
+ "loss": 0.9326,
+ "step": 742
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9338295943470915e-05,
+ "loss": 0.9554,
+ "step": 743
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9336064801124034e-05,
+ "loss": 0.9385,
+ "step": 744
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933383003275717e-05,
+ "loss": 0.9185,
+ "step": 745
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933159163923827e-05,
+ "loss": 0.947,
+ "step": 746
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9329349621436708e-05,
+ "loss": 0.9201,
+ "step": 747
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9327103980223255e-05,
+ "loss": 0.3265,
+ "step": 748
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.932485471647009e-05,
+ "loss": 0.3056,
+ "step": 749
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9322601831050804e-05,
+ "loss": 0.9428,
+ "step": 750
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9320345324840396e-05,
+ "loss": 0.9848,
+ "step": 751
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9318085198715257e-05,
+ "loss": 0.9482,
+ "step": 752
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.93158214535532e-05,
+ "loss": 0.9088,
+ "step": 753
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9313554090233436e-05,
+ "loss": 0.9325,
+ "step": 754
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9311283109636586e-05,
+ "loss": 0.9473,
+ "step": 755
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9309008512644668e-05,
+ "loss": 0.9608,
+ "step": 756
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.930673030014111e-05,
+ "loss": 1.0158,
+ "step": 757
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.930444847301075e-05,
+ "loss": 0.953,
+ "step": 758
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9302163032139813e-05,
+ "loss": 0.9279,
+ "step": 759
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9299873978415947e-05,
+ "loss": 0.9526,
+ "step": 760
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9297581312728187e-05,
+ "loss": 0.9076,
+ "step": 761
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929528503596698e-05,
+ "loss": 0.9154,
+ "step": 762
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929298514902418e-05,
+ "loss": 0.9768,
+ "step": 763
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929068165279303e-05,
+ "loss": 0.9425,
+ "step": 764
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928837454816818e-05,
+ "loss": 0.9292,
+ "step": 765
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9286063836045687e-05,
+ "loss": 0.9183,
+ "step": 766
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9283749517323e-05,
+ "loss": 0.9553,
+ "step": 767
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928143159289898e-05,
+ "loss": 0.9285,
+ "step": 768
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927911006367388e-05,
+ "loss": 0.8953,
+ "step": 769
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927678493054935e-05,
+ "loss": 1.01,
+ "step": 770
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9274456194428454e-05,
+ "loss": 0.8866,
+ "step": 771
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9272123856215643e-05,
+ "loss": 0.948,
+ "step": 772
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9269787916816764e-05,
+ "loss": 0.9182,
+ "step": 773
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9267448377139074e-05,
+ "loss": 0.9536,
+ "step": 774
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9265105238091227e-05,
+ "loss": 0.9415,
+ "step": 775
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9262758500583265e-05,
+ "loss": 0.8991,
+ "step": 776
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9260408165526638e-05,
+ "loss": 0.9534,
+ "step": 777
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9258054233834184e-05,
+ "loss": 0.8794,
+ "step": 778
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9255696706420147e-05,
+ "loss": 0.9673,
+ "step": 779
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9253335584200165e-05,
+ "loss": 0.8788,
+ "step": 780
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9250970868091268e-05,
+ "loss": 0.9376,
+ "step": 781
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.924860255901188e-05,
+ "loss": 0.8811,
+ "step": 782
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9246230657881834e-05,
+ "loss": 0.9476,
+ "step": 783
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9243855165622345e-05,
+ "loss": 0.9362,
+ "step": 784
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9241476083156026e-05,
+ "loss": 0.9155,
+ "step": 785
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9239093411406885e-05,
+ "loss": 0.9678,
+ "step": 786
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9236707151300326e-05,
+ "loss": 0.8807,
+ "step": 787
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9234317303763145e-05,
+ "loss": 0.8831,
+ "step": 788
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9231923869723528e-05,
+ "loss": 0.3529,
+ "step": 789
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.922952685011106e-05,
+ "loss": 0.964,
+ "step": 790
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9227126245856716e-05,
+ "loss": 0.3712,
+ "step": 791
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.922472205789286e-05,
+ "loss": 0.9838,
+ "step": 792
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9222314287153255e-05,
+ "loss": 0.299,
+ "step": 793
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9219902934573048e-05,
+ "loss": 0.9794,
+ "step": 794
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9217488001088784e-05,
+ "loss": 0.9706,
+ "step": 795
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9215069487638396e-05,
+ "loss": 0.991,
+ "step": 796
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.92126473951612e-05,
+ "loss": 0.9366,
+ "step": 797
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.921022172459791e-05,
+ "loss": 0.9889,
+ "step": 798
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.920779247689064e-05,
+ "loss": 0.9183,
+ "step": 799
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9205359652982868e-05,
+ "loss": 0.9689,
+ "step": 800
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9202923253819482e-05,
+ "loss": 0.9095,
+ "step": 801
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.920048328034675e-05,
+ "loss": 0.8812,
+ "step": 802
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9198039733512326e-05,
+ "loss": 0.9137,
+ "step": 803
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9195592614265262e-05,
+ "loss": 0.975,
+ "step": 804
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9193141923555984e-05,
+ "loss": 0.8885,
+ "step": 805
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.919068766233632e-05,
+ "loss": 0.854,
+ "step": 806
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9188229831559468e-05,
+ "loss": 0.9822,
+ "step": 807
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9185768432180026e-05,
+ "loss": 0.912,
+ "step": 808
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9183303465153972e-05,
+ "loss": 0.9205,
+ "step": 809
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9180834931438673e-05,
+ "loss": 0.9676,
+ "step": 810
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917836283199288e-05,
+ "loss": 0.8845,
+ "step": 811
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917588716777672e-05,
+ "loss": 0.9723,
+ "step": 812
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917340793975172e-05,
+ "loss": 0.9122,
+ "step": 813
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917092514888078e-05,
+ "loss": 0.9583,
+ "step": 814
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9168438796128193e-05,
+ "loss": 0.9079,
+ "step": 815
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9165948882459623e-05,
+ "loss": 0.8845,
+ "step": 816
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9163455408842123e-05,
+ "loss": 0.9921,
+ "step": 817
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9160958376244138e-05,
+ "loss": 0.9166,
+ "step": 818
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9158457785635478e-05,
+ "loss": 0.9785,
+ "step": 819
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915595363798735e-05,
+ "loss": 0.8986,
+ "step": 820
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915344593427233e-05,
+ "loss": 0.9226,
+ "step": 821
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9150934675464384e-05,
+ "loss": 0.8712,
+ "step": 822
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9148419862538858e-05,
+ "loss": 0.9654,
+ "step": 823
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9145901496472474e-05,
+ "loss": 0.9771,
+ "step": 824
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9143379578243335e-05,
+ "loss": 0.9436,
+ "step": 825
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.914085410883093e-05,
+ "loss": 0.9894,
+ "step": 826
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9138325089216118e-05,
+ "loss": 0.9068,
+ "step": 827
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.913579252038114e-05,
+ "loss": 0.9422,
+ "step": 828
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9133256403309627e-05,
+ "loss": 0.9182,
+ "step": 829
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.913071673898656e-05,
+ "loss": 0.9261,
+ "step": 830
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.912817352839833e-05,
+ "loss": 0.8802,
+ "step": 831
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9125626772532683e-05,
+ "loss": 0.877,
+ "step": 832
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9123076472378753e-05,
+ "loss": 0.9579,
+ "step": 833
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9120522628927047e-05,
+ "loss": 0.9898,
+ "step": 834
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9117965243169444e-05,
+ "loss": 0.9051,
+ "step": 835
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9115404316099212e-05,
+ "loss": 0.9402,
+ "step": 836
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9112839848710978e-05,
+ "loss": 0.9451,
+ "step": 837
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9110271842000755e-05,
+ "loss": 0.3687,
+ "step": 838
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9107700296965926e-05,
+ "loss": 0.9534,
+ "step": 839
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.910512521460525e-05,
+ "loss": 0.9271,
+ "step": 840
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9102546595918857e-05,
+ "loss": 1.0075,
+ "step": 841
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9099964441908258e-05,
+ "loss": 0.9131,
+ "step": 842
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9097378753576327e-05,
+ "loss": 0.9214,
+ "step": 843
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9094789531927315e-05,
+ "loss": 0.9203,
+ "step": 844
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.909219677796685e-05,
+ "loss": 0.9698,
+ "step": 845
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9089600492701926e-05,
+ "loss": 0.345,
+ "step": 846
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.908700067714091e-05,
+ "loss": 0.3491,
+ "step": 847
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9084397332293537e-05,
+ "loss": 0.9524,
+ "step": 848
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9081790459170926e-05,
+ "loss": 0.9527,
+ "step": 849
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9079180058785547e-05,
+ "loss": 0.9158,
+ "step": 850
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9076566132151255e-05,
+ "loss": 0.9576,
+ "step": 851
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.907394868028326e-05,
+ "loss": 0.9538,
+ "step": 852
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9071327704198163e-05,
+ "loss": 0.9251,
+ "step": 853
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.906870320491391e-05,
+ "loss": 0.8861,
+ "step": 854
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9066075183449835e-05,
+ "loss": 0.9199,
+ "step": 855
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9063443640826624e-05,
+ "loss": 0.8862,
+ "step": 856
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.906080857806634e-05,
+ "loss": 0.9411,
+ "step": 857
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.905816999619242e-05,
+ "loss": 0.955,
+ "step": 858
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9055527896229642e-05,
+ "loss": 0.9152,
+ "step": 859
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.905288227920418e-05,
+ "loss": 0.8776,
+ "step": 860
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9050233146143554e-05,
+ "loss": 0.9462,
+ "step": 861
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9047580498076663e-05,
+ "loss": 1.0074,
+ "step": 862
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904492433603376e-05,
+ "loss": 0.8786,
+ "step": 863
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904226466104647e-05,
+ "loss": 0.9435,
+ "step": 864
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.903960147414778e-05,
+ "loss": 0.9668,
+ "step": 865
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.903693477637204e-05,
+ "loss": 0.3139,
+ "step": 866
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9034264568754967e-05,
+ "loss": 0.9452,
+ "step": 867
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9031590852333637e-05,
+ "loss": 0.9357,
+ "step": 868
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9028913628146487e-05,
+ "loss": 0.9148,
+ "step": 869
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.902623289723333e-05,
+ "loss": 0.9018,
+ "step": 870
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.902354866063532e-05,
+ "loss": 0.9095,
+ "step": 871
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9020860919394992e-05,
+ "loss": 0.8821,
+ "step": 872
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9018169674556228e-05,
+ "loss": 0.8456,
+ "step": 873
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.901547492716428e-05,
+ "loss": 0.9265,
+ "step": 874
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9012776678265756e-05,
+ "loss": 0.9095,
+ "step": 875
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9010074928908624e-05,
+ "loss": 0.9256,
+ "step": 876
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900736968014221e-05,
+ "loss": 0.9216,
+ "step": 877
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9004660933017208e-05,
+ "loss": 0.9195,
+ "step": 878
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900194868858566e-05,
+ "loss": 0.9252,
+ "step": 879
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8999232947900968e-05,
+ "loss": 0.916,
+ "step": 880
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89965137120179e-05,
+ "loss": 0.3312,
+ "step": 881
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.899379098199257e-05,
+ "loss": 0.8992,
+ "step": 882
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.899106475888246e-05,
+ "loss": 0.9512,
+ "step": 883
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89883350437464e-05,
+ "loss": 0.9483,
+ "step": 884
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8985601837644586e-05,
+ "loss": 0.954,
+ "step": 885
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8982865141638557e-05,
+ "loss": 0.9777,
+ "step": 886
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8980124956791216e-05,
+ "loss": 0.9181,
+ "step": 887
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8977381284166818e-05,
+ "loss": 0.9654,
+ "step": 888
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897463412483098e-05,
+ "loss": 0.8833,
+ "step": 889
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897188347985066e-05,
+ "loss": 0.9304,
+ "step": 890
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896912935029418e-05,
+ "loss": 0.9227,
+ "step": 891
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896637173723121e-05,
+ "loss": 0.9524,
+ "step": 892
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8963610641732777e-05,
+ "loss": 0.9938,
+ "step": 893
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8960846064871257e-05,
+ "loss": 0.8756,
+ "step": 894
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8958078007720387e-05,
+ "loss": 0.8807,
+ "step": 895
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.895530647135524e-05,
+ "loss": 0.9026,
+ "step": 896
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8952531456852248e-05,
+ "loss": 0.3359,
+ "step": 897
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8949752965289197e-05,
+ "loss": 0.8913,
+ "step": 898
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.894697099774523e-05,
+ "loss": 0.8688,
+ "step": 899
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.894418555530082e-05,
+ "loss": 0.9398,
+ "step": 900
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89413966390378e-05,
+ "loss": 0.9413,
+ "step": 901
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8938604250039362e-05,
+ "loss": 0.9731,
+ "step": 902
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8935808389390032e-05,
+ "loss": 0.9106,
+ "step": 903
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.893300905817569e-05,
+ "loss": 0.8899,
+ "step": 904
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8930206257483566e-05,
+ "loss": 0.983,
+ "step": 905
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8927399988402233e-05,
+ "loss": 0.9512,
+ "step": 906
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8924590252021614e-05,
+ "loss": 0.9165,
+ "step": 907
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8921777049432985e-05,
+ "loss": 0.35,
+ "step": 908
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8918960381728947e-05,
+ "loss": 0.9625,
+ "step": 909
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8916140250003475e-05,
+ "loss": 0.905,
+ "step": 910
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.891331665535187e-05,
+ "loss": 0.9542,
+ "step": 911
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8910489598870784e-05,
+ "loss": 0.9589,
+ "step": 912
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8907659081658214e-05,
+ "loss": 0.9409,
+ "step": 913
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8904825104813497e-05,
+ "loss": 0.89,
+ "step": 914
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8901987669437322e-05,
+ "loss": 0.944,
+ "step": 915
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.889914677663171e-05,
+ "loss": 0.9217,
+ "step": 916
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8896302427500042e-05,
+ "loss": 0.8912,
+ "step": 917
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8893454623147017e-05,
+ "loss": 0.9592,
+ "step": 918
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.88906033646787e-05,
+ "loss": 0.9194,
+ "step": 919
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8887748653202478e-05,
+ "loss": 0.9415,
+ "step": 920
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8884890489827097e-05,
+ "loss": 0.8378,
+ "step": 921
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8882028875662627e-05,
+ "loss": 0.8577,
+ "step": 922
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8879163811820493e-05,
+ "loss": 0.9159,
+ "step": 923
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8876295299413445e-05,
+ "loss": 0.8698,
+ "step": 924
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8873423339555584e-05,
+ "loss": 0.9418,
+ "step": 925
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8870547933362352e-05,
+ "loss": 0.3612,
+ "step": 926
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.886766908195051e-05,
+ "loss": 0.9417,
+ "step": 927
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8864786786438187e-05,
+ "loss": 0.9079,
+ "step": 928
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.886190104794482e-05,
+ "loss": 0.9634,
+ "step": 929
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8859011867591203e-05,
+ "loss": 0.9203,
+ "step": 930
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885611924649946e-05,
+ "loss": 0.9226,
+ "step": 931
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885322318579305e-05,
+ "loss": 0.9712,
+ "step": 932
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8850323686596766e-05,
+ "loss": 0.9656,
+ "step": 933
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8847420750036748e-05,
+ "loss": 0.8569,
+ "step": 934
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.884451437724046e-05,
+ "loss": 0.9105,
+ "step": 935
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8841604569336702e-05,
+ "loss": 0.9168,
+ "step": 936
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.883869132745561e-05,
+ "loss": 0.8854,
+ "step": 937
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.883577465272866e-05,
+ "loss": 0.8706,
+ "step": 938
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8832854546288642e-05,
+ "loss": 0.9097,
+ "step": 939
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8829931009269707e-05,
+ "loss": 0.9096,
+ "step": 940
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882700404280731e-05,
+ "loss": 0.932,
+ "step": 941
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8824073648038258e-05,
+ "loss": 0.9086,
+ "step": 942
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882113982610068e-05,
+ "loss": 0.9184,
+ "step": 943
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.881820257813404e-05,
+ "loss": 0.9778,
+ "step": 944
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8815261905279133e-05,
+ "loss": 0.9168,
+ "step": 945
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8812317808678075e-05,
+ "loss": 0.3528,
+ "step": 946
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8809370289474327e-05,
+ "loss": 0.9731,
+ "step": 947
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8806419348812673e-05,
+ "loss": 0.9179,
+ "step": 948
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8803464987839217e-05,
+ "loss": 0.9582,
+ "step": 949
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.88005072077014e-05,
+ "loss": 0.947,
+ "step": 950
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8797546009547996e-05,
+ "loss": 0.958,
+ "step": 951
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.879458139452909e-05,
+ "loss": 0.9568,
+ "step": 952
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8791613363796118e-05,
+ "loss": 0.9292,
+ "step": 953
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8788641918501817e-05,
+ "loss": 0.8576,
+ "step": 954
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8785667059800264e-05,
+ "loss": 0.8156,
+ "step": 955
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8782688788846865e-05,
+ "loss": 0.8837,
+ "step": 956
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877970710679834e-05,
+ "loss": 0.9345,
+ "step": 957
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877672201481275e-05,
+ "loss": 0.8941,
+ "step": 958
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877373351404946e-05,
+ "loss": 0.9011,
+ "step": 959
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8770741605669173e-05,
+ "loss": 0.9253,
+ "step": 960
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.876774629083391e-05,
+ "loss": 0.9121,
+ "step": 961
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8764747570707017e-05,
+ "loss": 0.937,
+ "step": 962
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8761745446453167e-05,
+ "loss": 0.9532,
+ "step": 963
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.875873991923835e-05,
+ "loss": 0.9758,
+ "step": 964
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.875573099022987e-05,
+ "loss": 0.9477,
+ "step": 965
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8752718660596367e-05,
+ "loss": 0.9285,
+ "step": 966
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8749702931507797e-05,
+ "loss": 0.904,
+ "step": 967
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.874668380413543e-05,
+ "loss": 0.8815,
+ "step": 968
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8743661279651856e-05,
+ "loss": 0.9679,
+ "step": 969
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8740635359231e-05,
+ "loss": 0.9621,
+ "step": 970
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8737606044048086e-05,
+ "loss": 0.8696,
+ "step": 971
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.873457333527967e-05,
+ "loss": 0.9741,
+ "step": 972
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.873153723410362e-05,
+ "loss": 0.3411,
+ "step": 973
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8728497741699115e-05,
+ "loss": 0.9085,
+ "step": 974
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.872545485924667e-05,
+ "loss": 0.9056,
+ "step": 975
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8722408587928104e-05,
+ "loss": 1.0055,
+ "step": 976
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8719358928926546e-05,
+ "loss": 0.9415,
+ "step": 977
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8716305883426456e-05,
+ "loss": 0.8918,
+ "step": 978
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.87132494526136e-05,
+ "loss": 0.8949,
+ "step": 979
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8710189637675055e-05,
+ "loss": 0.8764,
+ "step": 980
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8707126439799225e-05,
+ "loss": 0.8984,
+ "step": 981
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.870405986017582e-05,
+ "loss": 0.8995,
+ "step": 982
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8700989899995857e-05,
+ "loss": 0.9052,
+ "step": 983
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8697916560451682e-05,
+ "loss": 0.923,
+ "step": 984
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.869483984273694e-05,
+ "loss": 0.8883,
+ "step": 985
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8691759748046594e-05,
+ "loss": 0.952,
+ "step": 986
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8688676277576916e-05,
+ "loss": 0.971,
+ "step": 987
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.868558943252549e-05,
+ "loss": 0.908,
+ "step": 988
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.868249921409122e-05,
+ "loss": 0.9385,
+ "step": 989
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8679405623474294e-05,
+ "loss": 0.9478,
+ "step": 990
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8676308661876242e-05,
+ "loss": 0.9882,
+ "step": 991
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8673208330499884e-05,
+ "loss": 0.898,
+ "step": 992
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8670104630549348e-05,
+ "loss": 0.9524,
+ "step": 993
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866699756323008e-05,
+ "loss": 0.8181,
+ "step": 994
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866388712974883e-05,
+ "loss": 0.9904,
+ "step": 995
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866077333131365e-05,
+ "loss": 0.8949,
+ "step": 996
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8657656169133908e-05,
+ "loss": 0.9498,
+ "step": 997
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8654535644420276e-05,
+ "loss": 0.8722,
+ "step": 998
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8651411758384718e-05,
+ "loss": 0.8998,
+ "step": 999
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8648284512240527e-05,
+ "loss": 0.8935,
+ "step": 1000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8645153907202285e-05,
+ "loss": 0.8978,
+ "step": 1001
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8642019944485884e-05,
+ "loss": 0.331,
+ "step": 1002
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.863888262530852e-05,
+ "loss": 0.9529,
+ "step": 1003
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.863574195088869e-05,
+ "loss": 0.8517,
+ "step": 1004
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8632597922446195e-05,
+ "loss": 0.9371,
+ "step": 1005
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8629450541202142e-05,
+ "loss": 0.971,
+ "step": 1006
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8626299808378933e-05,
+ "loss": 0.9409,
+ "step": 1007
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.862314572520028e-05,
+ "loss": 0.9195,
+ "step": 1008
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.861998829289119e-05,
+ "loss": 1.0161,
+ "step": 1009
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.861682751267798e-05,
+ "loss": 0.8846,
+ "step": 1010
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.861366338578825e-05,
+ "loss": 0.9276,
+ "step": 1011
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8610495913450922e-05,
+ "loss": 0.9159,
+ "step": 1012
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8607325096896197e-05,
+ "loss": 0.3851,
+ "step": 1013
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8604150937355588e-05,
+ "loss": 0.8811,
+ "step": 1014
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.86009734360619e-05,
+ "loss": 0.9636,
+ "step": 1015
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8597792594249237e-05,
+ "loss": 0.9326,
+ "step": 1016
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8594608413153e-05,
+ "loss": 0.9532,
+ "step": 1017
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8591420894009897e-05,
+ "loss": 0.9115,
+ "step": 1018
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8588230038057913e-05,
+ "loss": 0.9345,
+ "step": 1019
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8585035846536347e-05,
+ "loss": 0.9732,
+ "step": 1020
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8581838320685782e-05,
+ "loss": 0.96,
+ "step": 1021
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8578637461748105e-05,
+ "loss": 0.946,
+ "step": 1022
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.857543327096649e-05,
+ "loss": 0.9461,
+ "step": 1023
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.85722257495854e-05,
+ "loss": 0.9272,
+ "step": 1024
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.856901489885061e-05,
+ "loss": 0.9934,
+ "step": 1025
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.856580072000918e-05,
+ "loss": 0.8986,
+ "step": 1026
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8562583214309447e-05,
+ "loss": 0.9382,
+ "step": 1027
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.855936238300106e-05,
+ "loss": 0.9913,
+ "step": 1028
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8556138227334957e-05,
+ "loss": 0.9356,
+ "step": 1029
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.855291074856336e-05,
+ "loss": 0.9394,
+ "step": 1030
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8549679947939778e-05,
+ "loss": 0.9326,
+ "step": 1031
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8546445826719023e-05,
+ "loss": 0.9758,
+ "step": 1032
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8543208386157195e-05,
+ "loss": 0.9908,
+ "step": 1033
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.853996762751167e-05,
+ "loss": 0.9939,
+ "step": 1034
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8536723552041124e-05,
+ "loss": 0.9334,
+ "step": 1035
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.853347616100552e-05,
+ "loss": 0.8662,
+ "step": 1036
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8530225455666103e-05,
+ "loss": 0.874,
+ "step": 1037
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8526971437285416e-05,
+ "loss": 0.9607,
+ "step": 1038
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8523714107127278e-05,
+ "loss": 0.9436,
+ "step": 1039
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8520453466456797e-05,
+ "loss": 0.9564,
+ "step": 1040
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8517189516540376e-05,
+ "loss": 0.9919,
+ "step": 1041
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8513922258645687e-05,
+ "loss": 0.9457,
+ "step": 1042
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8510651694041702e-05,
+ "loss": 0.9113,
+ "step": 1043
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8507377823998664e-05,
+ "loss": 0.889,
+ "step": 1044
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.850410064978811e-05,
+ "loss": 0.8957,
+ "step": 1045
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8500820172682858e-05,
+ "loss": 0.9713,
+ "step": 1046
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8497536393957005e-05,
+ "loss": 0.3168,
+ "step": 1047
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8494249314885932e-05,
+ "loss": 0.8657,
+ "step": 1048
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8490958936746304e-05,
+ "loss": 0.9021,
+ "step": 1049
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.848766526081607e-05,
+ "loss": 0.8838,
+ "step": 1050
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8484368288374452e-05,
+ "loss": 0.9475,
+ "step": 1051
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8481068020701954e-05,
+ "loss": 0.9845,
+ "step": 1052
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8477764459080364e-05,
+ "loss": 0.8957,
+ "step": 1053
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8474457604792746e-05,
+ "loss": 0.3215,
+ "step": 1054
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8471147459123447e-05,
+ "loss": 0.9442,
+ "step": 1055
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8467834023358088e-05,
+ "loss": 0.8866,
+ "step": 1056
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.846451729878357e-05,
+ "loss": 0.9572,
+ "step": 1057
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.846119728668807e-05,
+ "loss": 0.9419,
+ "step": 1058
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.845787398836104e-05,
+ "loss": 0.9387,
+ "step": 1059
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8454547405093212e-05,
+ "loss": 0.9377,
+ "step": 1060
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8451217538176597e-05,
+ "loss": 0.9553,
+ "step": 1061
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.844788438890447e-05,
+ "loss": 0.986,
+ "step": 1062
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8444547958571396e-05,
+ "loss": 0.3361,
+ "step": 1063
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.84412082484732e-05,
+ "loss": 0.3385,
+ "step": 1064
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8437865259906987e-05,
+ "loss": 0.9415,
+ "step": 1065
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8434518994171136e-05,
+ "loss": 0.9397,
+ "step": 1066
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.84311694525653e-05,
+ "loss": 0.9054,
+ "step": 1067
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.84278166363904e-05,
+ "loss": 0.8955,
+ "step": 1068
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8424460546948632e-05,
+ "loss": 0.9017,
+ "step": 1069
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8421101185543463e-05,
+ "loss": 0.92,
+ "step": 1070
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841773855347963e-05,
+ "loss": 0.9055,
+ "step": 1071
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841437265206314e-05,
+ "loss": 0.8364,
+ "step": 1072
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841100348260127e-05,
+ "loss": 0.9515,
+ "step": 1073
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.840763104640257e-05,
+ "loss": 0.9436,
+ "step": 1074
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8404255344776853e-05,
+ "loss": 0.9395,
+ "step": 1075
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.84008763790352e-05,
+ "loss": 0.8926,
+ "step": 1076
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8397494150489963e-05,
+ "loss": 0.9672,
+ "step": 1077
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8394108660454766e-05,
+ "loss": 0.865,
+ "step": 1078
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8390719910244487e-05,
+ "loss": 0.9504,
+ "step": 1079
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8387327901175286e-05,
+ "loss": 0.941,
+ "step": 1080
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838393263456457e-05,
+ "loss": 0.9799,
+ "step": 1081
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838053411173103e-05,
+ "loss": 0.9256,
+ "step": 1082
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8377132333994606e-05,
+ "loss": 0.9193,
+ "step": 1083
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.837372730267652e-05,
+ "loss": 0.8726,
+ "step": 1084
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8370319019099236e-05,
+ "loss": 0.9096,
+ "step": 1085
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8366907484586497e-05,
+ "loss": 0.9407,
+ "step": 1086
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.83634927004633e-05,
+ "loss": 0.9167,
+ "step": 1087
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8360074668055915e-05,
+ "loss": 0.9128,
+ "step": 1088
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8356653388691857e-05,
+ "loss": 0.8422,
+ "step": 1089
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8353228863699922e-05,
+ "loss": 0.908,
+ "step": 1090
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8349801094410148e-05,
+ "loss": 0.8724,
+ "step": 1091
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8346370082153843e-05,
+ "loss": 0.9003,
+ "step": 1092
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8342935828263574e-05,
+ "loss": 0.98,
+ "step": 1093
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8339498334073166e-05,
+ "loss": 0.8614,
+ "step": 1094
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.83360576009177e-05,
+ "loss": 0.911,
+ "step": 1095
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.833261363013352e-05,
+ "loss": 0.8732,
+ "step": 1096
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.832916642305822e-05,
+ "loss": 0.8753,
+ "step": 1097
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.832571598103066e-05,
+ "loss": 0.9483,
+ "step": 1098
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8322262305390948e-05,
+ "loss": 0.974,
+ "step": 1099
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8318805397480455e-05,
+ "loss": 0.9167,
+ "step": 1100
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8315345258641802e-05,
+ "loss": 0.9712,
+ "step": 1101
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8311881890218873e-05,
+ "loss": 0.9197,
+ "step": 1102
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.830841529355679e-05,
+ "loss": 0.9173,
+ "step": 1103
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8304945470001948e-05,
+ "loss": 0.9408,
+ "step": 1104
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8301472420901985e-05,
+ "loss": 0.9391,
+ "step": 1105
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8297996147605787e-05,
+ "loss": 0.9482,
+ "step": 1106
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.829451665146351e-05,
+ "loss": 0.8962,
+ "step": 1107
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8291033933826535e-05,
+ "loss": 0.9761,
+ "step": 1108
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8287547996047523e-05,
+ "loss": 0.933,
+ "step": 1109
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8284058839480363e-05,
+ "loss": 0.8762,
+ "step": 1110
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8280566465480206e-05,
+ "loss": 0.9217,
+ "step": 1111
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8277070875403455e-05,
+ "loss": 0.3614,
+ "step": 1112
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8273572070607756e-05,
+ "loss": 0.9359,
+ "step": 1113
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8270070052451995e-05,
+ "loss": 0.9401,
+ "step": 1114
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8266564822296323e-05,
+ "loss": 0.9186,
+ "step": 1115
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.826305638150213e-05,
+ "loss": 0.321,
+ "step": 1116
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.825954473143205e-05,
+ "loss": 0.3378,
+ "step": 1117
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8256029873449976e-05,
+ "loss": 0.8492,
+ "step": 1118
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.825251180892103e-05,
+ "loss": 0.3327,
+ "step": 1119
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8248990539211596e-05,
+ "loss": 0.8799,
+ "step": 1120
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8245466065689282e-05,
+ "loss": 0.9734,
+ "step": 1121
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.824193838972297e-05,
+ "loss": 0.9092,
+ "step": 1122
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.823840751268275e-05,
+ "loss": 0.8317,
+ "step": 1123
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8234873435939987e-05,
+ "loss": 0.8746,
+ "step": 1124
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8231336160867275e-05,
+ "loss": 0.9396,
+ "step": 1125
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8227795688838446e-05,
+ "loss": 0.9045,
+ "step": 1126
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.822425202122858e-05,
+ "loss": 0.9036,
+ "step": 1127
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8220705159413996e-05,
+ "loss": 0.8528,
+ "step": 1128
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8217155104772256e-05,
+ "loss": 0.9213,
+ "step": 1129
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8213601858682158e-05,
+ "loss": 0.4092,
+ "step": 1130
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8210045422523744e-05,
+ "loss": 0.9155,
+ "step": 1131
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8206485797678294e-05,
+ "loss": 0.3397,
+ "step": 1132
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.820292298552832e-05,
+ "loss": 0.9601,
+ "step": 1133
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.819935698745759e-05,
+ "loss": 0.8789,
+ "step": 1134
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8195787804851076e-05,
+ "loss": 0.9189,
+ "step": 1135
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8192215439095025e-05,
+ "loss": 0.8967,
+ "step": 1136
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8188639891576893e-05,
+ "loss": 0.8964,
+ "step": 1137
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8185061163685386e-05,
+ "loss": 0.8626,
+ "step": 1138
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.818147925681044e-05,
+ "loss": 0.9607,
+ "step": 1139
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8177894172343227e-05,
+ "loss": 0.9404,
+ "step": 1140
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.817430591167615e-05,
+ "loss": 0.9238,
+ "step": 1141
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8170714476202848e-05,
+ "loss": 0.9255,
+ "step": 1142
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8167119867318197e-05,
+ "loss": 0.3475,
+ "step": 1143
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.81635220864183e-05,
+ "loss": 0.8938,
+ "step": 1144
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8159921134900486e-05,
+ "loss": 0.9342,
+ "step": 1145
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8156317014163337e-05,
+ "loss": 0.9245,
+ "step": 1146
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8152709725606642e-05,
+ "loss": 0.9467,
+ "step": 1147
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8149099270631434e-05,
+ "loss": 0.8735,
+ "step": 1148
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8145485650639973e-05,
+ "loss": 0.3534,
+ "step": 1149
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8141868867035745e-05,
+ "loss": 0.9545,
+ "step": 1150
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8138248921223465e-05,
+ "loss": 0.8612,
+ "step": 1151
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8134625814609084e-05,
+ "loss": 0.3531,
+ "step": 1152
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8130999548599767e-05,
+ "loss": 0.9884,
+ "step": 1153
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8127370124603927e-05,
+ "loss": 0.938,
+ "step": 1154
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8123737544031178e-05,
+ "loss": 0.9063,
+ "step": 1155
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8120101808292373e-05,
+ "loss": 0.9163,
+ "step": 1156
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.81164629187996e-05,
+ "loss": 0.9941,
+ "step": 1157
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811282087696615e-05,
+ "loss": 0.8835,
+ "step": 1158
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8109175684206558e-05,
+ "loss": 0.8915,
+ "step": 1159
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8105527341936574e-05,
+ "loss": 0.9839,
+ "step": 1160
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.810187585157317e-05,
+ "loss": 0.3224,
+ "step": 1161
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8098221214534543e-05,
+ "loss": 0.307,
+ "step": 1162
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8094563432240107e-05,
+ "loss": 0.9391,
+ "step": 1163
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8090902506110513e-05,
+ "loss": 0.9469,
+ "step": 1164
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8087238437567614e-05,
+ "loss": 0.9498,
+ "step": 1165
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8083571228034498e-05,
+ "loss": 0.912,
+ "step": 1166
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.807990087893546e-05,
+ "loss": 0.8633,
+ "step": 1167
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.807622739169603e-05,
+ "loss": 0.952,
+ "step": 1168
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.807255076774294e-05,
+ "loss": 0.3537,
+ "step": 1169
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8068871008504153e-05,
+ "loss": 0.9312,
+ "step": 1170
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8065188115408844e-05,
+ "loss": 1.0083,
+ "step": 1171
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8061502089887406e-05,
+ "loss": 0.9158,
+ "step": 1172
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.805781293337145e-05,
+ "loss": 0.8719,
+ "step": 1173
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8054120647293798e-05,
+ "loss": 0.8885,
+ "step": 1174
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8050425233088496e-05,
+ "loss": 0.3342,
+ "step": 1175
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.80467266921908e-05,
+ "loss": 0.8902,
+ "step": 1176
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8043025026037178e-05,
+ "loss": 0.8875,
+ "step": 1177
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8039320236065314e-05,
+ "loss": 0.9133,
+ "step": 1178
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.803561232371411e-05,
+ "loss": 0.9502,
+ "step": 1179
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.803190129042367e-05,
+ "loss": 0.9052,
+ "step": 1180
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8028187137635325e-05,
+ "loss": 0.9564,
+ "step": 1181
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8024469866791602e-05,
+ "loss": 0.9611,
+ "step": 1182
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.802074947933625e-05,
+ "loss": 0.9002,
+ "step": 1183
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.801702597671422e-05,
+ "loss": 0.3337,
+ "step": 1184
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8013299360371685e-05,
+ "loss": 0.8692,
+ "step": 1185
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8009569631756013e-05,
+ "loss": 0.9799,
+ "step": 1186
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8005836792315793e-05,
+ "loss": 0.8318,
+ "step": 1187
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.800210084350081e-05,
+ "loss": 0.9676,
+ "step": 1188
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.799836178676207e-05,
+ "loss": 0.9258,
+ "step": 1189
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.799461962355178e-05,
+ "loss": 0.9595,
+ "step": 1190
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7990874355323345e-05,
+ "loss": 0.9038,
+ "step": 1191
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7987125983531393e-05,
+ "loss": 0.8838,
+ "step": 1192
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7983374509631742e-05,
+ "loss": 0.8658,
+ "step": 1193
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7979619935081424e-05,
+ "loss": 0.9302,
+ "step": 1194
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.797586226133867e-05,
+ "loss": 0.8896,
+ "step": 1195
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7972101489862924e-05,
+ "loss": 0.9137,
+ "step": 1196
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7968337622114824e-05,
+ "loss": 0.8774,
+ "step": 1197
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7964570659556206e-05,
+ "loss": 0.9242,
+ "step": 1198
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.796080060365012e-05,
+ "loss": 0.9346,
+ "step": 1199
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7957027455860815e-05,
+ "loss": 0.944,
+ "step": 1200
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.795325121765373e-05,
+ "loss": 0.8704,
+ "step": 1201
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.794947189049552e-05,
+ "loss": 0.3457,
+ "step": 1202
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7945689475854033e-05,
+ "loss": 0.9214,
+ "step": 1203
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7941903975198305e-05,
+ "loss": 0.8942,
+ "step": 1204
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7938115389998595e-05,
+ "loss": 0.931,
+ "step": 1205
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7934323721726334e-05,
+ "loss": 0.9481,
+ "step": 1206
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7930528971854166e-05,
+ "loss": 0.951,
+ "step": 1207
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.792673114185593e-05,
+ "loss": 0.9125,
+ "step": 1208
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7922930233206656e-05,
+ "loss": 0.937,
+ "step": 1209
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7919126247382576e-05,
+ "loss": 0.9068,
+ "step": 1210
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.791531918586112e-05,
+ "loss": 0.896,
+ "step": 1211
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7911509050120892e-05,
+ "loss": 0.97,
+ "step": 1212
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7907695841641716e-05,
+ "loss": 0.892,
+ "step": 1213
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7903879561904597e-05,
+ "loss": 0.9074,
+ "step": 1214
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.790006021239173e-05,
+ "loss": 0.9188,
+ "step": 1215
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.789623779458651e-05,
+ "loss": 0.3391,
+ "step": 1216
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.789241230997352e-05,
+ "loss": 1.015,
+ "step": 1217
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7888583760038534e-05,
+ "loss": 0.8729,
+ "step": 1218
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7884752146268513e-05,
+ "loss": 0.8719,
+ "step": 1219
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7880917470151614e-05,
+ "loss": 0.9325,
+ "step": 1220
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7877079733177185e-05,
+ "loss": 0.9018,
+ "step": 1221
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7873238936835754e-05,
+ "loss": 0.8804,
+ "step": 1222
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786939508261904e-05,
+ "loss": 0.9016,
+ "step": 1223
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786554817201996e-05,
+ "loss": 0.9105,
+ "step": 1224
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.78616982065326e-05,
+ "loss": 0.3369,
+ "step": 1225
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.785784518765225e-05,
+ "loss": 0.8996,
+ "step": 1226
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7853989116875373e-05,
+ "loss": 0.3376,
+ "step": 1227
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7850129995699626e-05,
+ "loss": 0.8681,
+ "step": 1228
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7846267825623843e-05,
+ "loss": 0.9937,
+ "step": 1229
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7842402608148053e-05,
+ "loss": 0.9595,
+ "step": 1230
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7838534344773453e-05,
+ "loss": 0.8766,
+ "step": 1231
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7834663037002444e-05,
+ "loss": 0.921,
+ "step": 1232
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7830788686338586e-05,
+ "loss": 0.9094,
+ "step": 1233
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7826911294286636e-05,
+ "loss": 0.8882,
+ "step": 1234
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.782303086235253e-05,
+ "loss": 0.9083,
+ "step": 1235
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.781914739204338e-05,
+ "loss": 0.7964,
+ "step": 1236
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7815260884867486e-05,
+ "loss": 0.9136,
+ "step": 1237
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.781137134233432e-05,
+ "loss": 0.9764,
+ "step": 1238
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7807478765954532e-05,
+ "loss": 0.9434,
+ "step": 1239
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7803583157239958e-05,
+ "loss": 0.9205,
+ "step": 1240
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7799684517703605e-05,
+ "loss": 0.8635,
+ "step": 1241
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.779578284885966e-05,
+ "loss": 0.3322,
+ "step": 1242
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.779187815222349e-05,
+ "loss": 0.8767,
+ "step": 1243
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.778797042931163e-05,
+ "loss": 0.902,
+ "step": 1244
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7784059681641798e-05,
+ "loss": 0.9385,
+ "step": 1245
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.778014591073288e-05,
+ "loss": 0.934,
+ "step": 1246
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.777622911810494e-05,
+ "loss": 0.8906,
+ "step": 1247
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.777230930527922e-05,
+ "loss": 0.8875,
+ "step": 1248
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7768386473778124e-05,
+ "loss": 0.9189,
+ "step": 1249
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7764460625125236e-05,
+ "loss": 0.8831,
+ "step": 1250
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.776053176084531e-05,
+ "loss": 0.9489,
+ "step": 1251
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7756599882464274e-05,
+ "loss": 0.9754,
+ "step": 1252
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7752664991509224e-05,
+ "loss": 0.8612,
+ "step": 1253
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7748727089508423e-05,
+ "loss": 0.8817,
+ "step": 1254
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7744786177991307e-05,
+ "loss": 0.3096,
+ "step": 1255
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.774084225848849e-05,
+ "loss": 0.8883,
+ "step": 1256
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.773689533253173e-05,
+ "loss": 0.8886,
+ "step": 1257
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7732945401653978e-05,
+ "loss": 0.8891,
+ "step": 1258
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7728992467389342e-05,
+ "loss": 0.9199,
+ "step": 1259
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7725036531273087e-05,
+ "loss": 0.9217,
+ "step": 1260
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7721077594841663e-05,
+ "loss": 0.9471,
+ "step": 1261
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.771711565963267e-05,
+ "loss": 0.8964,
+ "step": 1262
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7713150727184878e-05,
+ "loss": 0.9179,
+ "step": 1263
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.770918279903822e-05,
+ "loss": 0.8938,
+ "step": 1264
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.77052118767338e-05,
+ "loss": 0.8972,
+ "step": 1265
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7701237961813874e-05,
+ "loss": 0.9037,
+ "step": 1266
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7697261055821864e-05,
+ "loss": 0.9953,
+ "step": 1267
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7693281160302354e-05,
+ "loss": 0.916,
+ "step": 1268
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7689298276801095e-05,
+ "loss": 0.8463,
+ "step": 1269
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7685312406864986e-05,
+ "loss": 0.9613,
+ "step": 1270
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7681323552042094e-05,
+ "loss": 0.34,
+ "step": 1271
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.767733171388165e-05,
+ "loss": 0.9194,
+ "step": 1272
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7673336893934033e-05,
+ "loss": 0.8789,
+ "step": 1273
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7669339093750786e-05,
+ "loss": 0.872,
+ "step": 1274
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.766533831488461e-05,
+ "loss": 0.9512,
+ "step": 1275
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7661334558889357e-05,
+ "loss": 0.8791,
+ "step": 1276
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7657327827320046e-05,
+ "loss": 0.8505,
+ "step": 1277
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.765331812173284e-05,
+ "loss": 0.9258,
+ "step": 1278
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7649305443685068e-05,
+ "loss": 0.8811,
+ "step": 1279
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.76452897947352e-05,
+ "loss": 0.8763,
+ "step": 1280
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7641271176442876e-05,
+ "loss": 0.8905,
+ "step": 1281
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7637249590368878e-05,
+ "loss": 0.9268,
+ "step": 1282
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.763322503807514e-05,
+ "loss": 0.9025,
+ "step": 1283
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7629197521124758e-05,
+ "loss": 0.8868,
+ "step": 1284
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7625167041081967e-05,
+ "loss": 0.9169,
+ "step": 1285
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7621133599512163e-05,
+ "loss": 0.8898,
+ "step": 1286
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.761709719798189e-05,
+ "loss": 0.9608,
+ "step": 1287
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.761305783805883e-05,
+ "loss": 0.9333,
+ "step": 1288
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7609015521311836e-05,
+ "loss": 0.3426,
+ "step": 1289
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7604970249310893e-05,
+ "loss": 0.8983,
+ "step": 1290
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7600922023627137e-05,
+ "loss": 0.8992,
+ "step": 1291
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.759687084583285e-05,
+ "loss": 0.927,
+ "step": 1292
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.759281671750147e-05,
+ "loss": 0.9638,
+ "step": 1293
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7588759640207564e-05,
+ "loss": 0.3531,
+ "step": 1294
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7584699615526857e-05,
+ "loss": 0.9061,
+ "step": 1295
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7580636645036224e-05,
+ "loss": 0.9489,
+ "step": 1296
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.757657073031367e-05,
+ "loss": 0.985,
+ "step": 1297
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7572501872938343e-05,
+ "loss": 0.3491,
+ "step": 1298
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.756843007449055e-05,
+ "loss": 0.9456,
+ "step": 1299
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7564355336551727e-05,
+ "loss": 0.8545,
+ "step": 1300
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7560277660704455e-05,
+ "loss": 0.3479,
+ "step": 1301
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.755619704853246e-05,
+ "loss": 0.8946,
+ "step": 1302
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7552113501620595e-05,
+ "loss": 0.8977,
+ "step": 1303
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7548027021554874e-05,
+ "loss": 0.9164,
+ "step": 1304
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.754393760992243e-05,
+ "loss": 0.9089,
+ "step": 1305
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7539845268311548e-05,
+ "loss": 0.9209,
+ "step": 1306
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7535749998311645e-05,
+ "loss": 0.9072,
+ "step": 1307
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.753165180151328e-05,
+ "loss": 0.3462,
+ "step": 1308
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.752755067950814e-05,
+ "loss": 0.9508,
+ "step": 1309
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.752344663388906e-05,
+ "loss": 0.9197,
+ "step": 1310
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7519339666249997e-05,
+ "loss": 0.8936,
+ "step": 1311
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7515229778186052e-05,
+ "loss": 0.897,
+ "step": 1312
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7511116971293463e-05,
+ "loss": 0.8872,
+ "step": 1313
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7507001247169587e-05,
+ "loss": 0.9111,
+ "step": 1314
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7502882607412933e-05,
+ "loss": 0.9244,
+ "step": 1315
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.749876105362313e-05,
+ "loss": 0.9142,
+ "step": 1316
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7494636587400942e-05,
+ "loss": 0.9361,
+ "step": 1317
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.749050921034826e-05,
+ "loss": 0.9259,
+ "step": 1318
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7486378924068123e-05,
+ "loss": 0.9613,
+ "step": 1319
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.748224573016467e-05,
+ "loss": 0.9206,
+ "step": 1320
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7478109630243195e-05,
+ "loss": 0.8995,
+ "step": 1321
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.747397062591011e-05,
+ "loss": 0.8912,
+ "step": 1322
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.746982871877296e-05,
+ "loss": 0.9132,
+ "step": 1323
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7465683910440405e-05,
+ "loss": 0.3367,
+ "step": 1324
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7461536202522248e-05,
+ "loss": 0.8463,
+ "step": 1325
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.745738559662941e-05,
+ "loss": 0.8797,
+ "step": 1326
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7453232094373936e-05,
+ "loss": 0.9016,
+ "step": 1327
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7449075697369005e-05,
+ "loss": 0.9495,
+ "step": 1328
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7444916407228904e-05,
+ "loss": 0.8987,
+ "step": 1329
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.744075422556906e-05,
+ "loss": 0.8992,
+ "step": 1330
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7436589154006014e-05,
+ "loss": 0.991,
+ "step": 1331
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.743242119415743e-05,
+ "loss": 0.8775,
+ "step": 1332
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7428250347642102e-05,
+ "loss": 0.8728,
+ "step": 1333
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7424076616079933e-05,
+ "loss": 0.9342,
+ "step": 1334
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7419900001091953e-05,
+ "loss": 0.9245,
+ "step": 1335
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7415720504300314e-05,
+ "loss": 0.9226,
+ "step": 1336
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.741153812732828e-05,
+ "loss": 0.9372,
+ "step": 1337
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7407352871800246e-05,
+ "loss": 0.8907,
+ "step": 1338
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7403164739341708e-05,
+ "loss": 0.911,
+ "step": 1339
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.739897373157929e-05,
+ "loss": 0.8968,
+ "step": 1340
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7394779850140736e-05,
+ "loss": 0.938,
+ "step": 1341
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7390583096654895e-05,
+ "loss": 0.8963,
+ "step": 1342
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7386383472751745e-05,
+ "loss": 0.9017,
+ "step": 1343
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7382180980062365e-05,
+ "loss": 0.9331,
+ "step": 1344
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7377975620218954e-05,
+ "loss": 0.9107,
+ "step": 1345
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7373767394854836e-05,
+ "loss": 0.9292,
+ "step": 1346
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7369556305604422e-05,
+ "loss": 0.9282,
+ "step": 1347
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.736534235410326e-05,
+ "loss": 0.8801,
+ "step": 1348
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7361125541988e-05,
+ "loss": 0.8528,
+ "step": 1349
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7356905870896407e-05,
+ "loss": 0.9513,
+ "step": 1350
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.735268334246734e-05,
+ "loss": 0.878,
+ "step": 1351
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7348457958340792e-05,
+ "loss": 0.3268,
+ "step": 1352
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7344229720157846e-05,
+ "loss": 0.8879,
+ "step": 1353
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7339998629560705e-05,
+ "loss": 0.9062,
+ "step": 1354
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7335764688192676e-05,
+ "loss": 0.9827,
+ "step": 1355
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.733152789769817e-05,
+ "loss": 1.0078,
+ "step": 1356
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7327288259722714e-05,
+ "loss": 0.9629,
+ "step": 1357
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7323045775912927e-05,
+ "loss": 0.9111,
+ "step": 1358
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7318800447916543e-05,
+ "loss": 0.94,
+ "step": 1359
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7314552277382403e-05,
+ "loss": 0.9217,
+ "step": 1360
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7310301265960446e-05,
+ "loss": 0.8825,
+ "step": 1361
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7306047415301706e-05,
+ "loss": 0.8768,
+ "step": 1362
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7301790727058344e-05,
+ "loss": 0.8997,
+ "step": 1363
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7297531202883598e-05,
+ "loss": 0.9012,
+ "step": 1364
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7293268844431826e-05,
+ "loss": 0.8939,
+ "step": 1365
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7289003653358472e-05,
+ "loss": 0.9494,
+ "step": 1366
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7284735631320093e-05,
+ "loss": 0.8624,
+ "step": 1367
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7280464779974335e-05,
+ "loss": 0.9329,
+ "step": 1368
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7276191100979952e-05,
+ "loss": 0.8958,
+ "step": 1369
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7271914595996784e-05,
+ "loss": 0.8791,
+ "step": 1370
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7267635266685782e-05,
+ "loss": 0.9476,
+ "step": 1371
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7263353114708993e-05,
+ "loss": 0.963,
+ "step": 1372
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7259068141729542e-05,
+ "loss": 0.8908,
+ "step": 1373
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7254780349411677e-05,
+ "loss": 0.3562,
+ "step": 1374
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7250489739420718e-05,
+ "loss": 0.969,
+ "step": 1375
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7246196313423095e-05,
+ "loss": 0.3459,
+ "step": 1376
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7241900073086318e-05,
+ "loss": 0.9044,
+ "step": 1377
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7237601020079003e-05,
+ "loss": 0.8814,
+ "step": 1378
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7233299156070852e-05,
+ "loss": 0.9551,
+ "step": 1379
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7228994482732653e-05,
+ "loss": 0.9468,
+ "step": 1380
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.72246870017363e-05,
+ "loss": 0.9222,
+ "step": 1381
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7220376714754766e-05,
+ "loss": 0.8468,
+ "step": 1382
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7216063623462112e-05,
+ "loss": 0.8935,
+ "step": 1383
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7211747729533504e-05,
+ "loss": 0.9338,
+ "step": 1384
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7207429034645176e-05,
+ "loss": 0.8834,
+ "step": 1385
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.720310754047446e-05,
+ "loss": 0.9381,
+ "step": 1386
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.719878324869978e-05,
+ "loss": 0.896,
+ "step": 1387
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7194456161000634e-05,
+ "loss": 0.9337,
+ "step": 1388
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.719012627905762e-05,
+ "loss": 0.8796,
+ "step": 1389
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.718579360455241e-05,
+ "loss": 0.3521,
+ "step": 1390
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7181458139167767e-05,
+ "loss": 0.8592,
+ "step": 1391
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7177119884587536e-05,
+ "loss": 0.8868,
+ "step": 1392
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.717277884249664e-05,
+ "loss": 0.9154,
+ "step": 1393
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.716843501458109e-05,
+ "loss": 0.3666,
+ "step": 1394
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.716408840252799e-05,
+ "loss": 0.895,
+ "step": 1395
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7159739008025503e-05,
+ "loss": 0.9729,
+ "step": 1396
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7155386832762892e-05,
+ "loss": 0.3205,
+ "step": 1397
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.715103187843048e-05,
+ "loss": 0.945,
+ "step": 1398
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7146674146719688e-05,
+ "loss": 0.9108,
+ "step": 1399
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7142313639323012e-05,
+ "loss": 0.8483,
+ "step": 1400
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7137950357934017e-05,
+ "loss": 0.9048,
+ "step": 1401
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7133584304247354e-05,
+ "loss": 0.8964,
+ "step": 1402
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7129215479958747e-05,
+ "loss": 0.8927,
+ "step": 1403
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7124843886765e-05,
+ "loss": 0.898,
+ "step": 1404
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.712046952636398e-05,
+ "loss": 0.8874,
+ "step": 1405
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7116092400454655e-05,
+ "loss": 0.8855,
+ "step": 1406
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7111712510737035e-05,
+ "loss": 0.8747,
+ "step": 1407
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7107329858912226e-05,
+ "loss": 0.8969,
+ "step": 1408
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7102944446682393e-05,
+ "loss": 0.9312,
+ "step": 1409
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.709855627575079e-05,
+ "loss": 0.9206,
+ "step": 1410
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7094165347821724e-05,
+ "loss": 0.9285,
+ "step": 1411
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7089771664600584e-05,
+ "loss": 0.872,
+ "step": 1412
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.708537522779382e-05,
+ "loss": 0.9077,
+ "step": 1413
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7080976039108964e-05,
+ "loss": 0.3426,
+ "step": 1414
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7076574100254614e-05,
+ "loss": 0.9247,
+ "step": 1415
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.707216941294042e-05,
+ "loss": 0.8963,
+ "step": 1416
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.706776197887712e-05,
+ "loss": 0.8499,
+ "step": 1417
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7063351799776514e-05,
+ "loss": 0.9228,
+ "step": 1418
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7058938877351456e-05,
+ "loss": 0.887,
+ "step": 1419
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.705452321331588e-05,
+ "loss": 0.8649,
+ "step": 1420
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7050104809384774e-05,
+ "loss": 0.302,
+ "step": 1421
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.70456836672742e-05,
+ "loss": 0.876,
+ "step": 1422
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.704125978870128e-05,
+ "loss": 0.8863,
+ "step": 1423
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7036833175384192e-05,
+ "loss": 0.8995,
+ "step": 1424
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7032403829042182e-05,
+ "loss": 0.3592,
+ "step": 1425
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7027971751395563e-05,
+ "loss": 0.3541,
+ "step": 1426
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7023536944165697e-05,
+ "loss": 0.9257,
+ "step": 1427
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7019099409075014e-05,
+ "loss": 0.8985,
+ "step": 1428
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7014659147847005e-05,
+ "loss": 0.9004,
+ "step": 1429
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.701021616220621e-05,
+ "loss": 0.9354,
+ "step": 1430
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7005770453878234e-05,
+ "loss": 0.978,
+ "step": 1431
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7001322024589742e-05,
+ "loss": 0.9114,
+ "step": 1432
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6996870876068455e-05,
+ "loss": 0.9199,
+ "step": 1433
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6992417010043144e-05,
+ "loss": 0.8406,
+ "step": 1434
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6987960428243637e-05,
+ "loss": 0.8679,
+ "step": 1435
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6983501132400825e-05,
+ "loss": 0.9248,
+ "step": 1436
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6979039124246643e-05,
+ "loss": 1.0086,
+ "step": 1437
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6974574405514083e-05,
+ "loss": 0.3541,
+ "step": 1438
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6970106977937192e-05,
+ "loss": 0.9326,
+ "step": 1439
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.696563684325107e-05,
+ "loss": 0.3749,
+ "step": 1440
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6961164003191862e-05,
+ "loss": 0.9212,
+ "step": 1441
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6956688459496767e-05,
+ "loss": 0.9714,
+ "step": 1442
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.695221021390404e-05,
+ "loss": 0.8775,
+ "step": 1443
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6947729268152972e-05,
+ "loss": 0.9413,
+ "step": 1444
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6943245623983918e-05,
+ "loss": 0.97,
+ "step": 1445
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6938759283138268e-05,
+ "loss": 0.8966,
+ "step": 1446
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.693427024735847e-05,
+ "loss": 0.9482,
+ "step": 1447
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.692977851838801e-05,
+ "loss": 0.9045,
+ "step": 1448
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6925284097971427e-05,
+ "loss": 0.9114,
+ "step": 1449
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6920786987854296e-05,
+ "loss": 0.9462,
+ "step": 1450
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.691628718978325e-05,
+ "loss": 0.9369,
+ "step": 1451
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.691178470550596e-05,
+ "loss": 0.9344,
+ "step": 1452
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6907279536771127e-05,
+ "loss": 0.9304,
+ "step": 1453
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6902771685328524e-05,
+ "loss": 0.8722,
+ "step": 1454
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6898261152928933e-05,
+ "loss": 0.9627,
+ "step": 1455
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6893747941324197e-05,
+ "loss": 0.9426,
+ "step": 1456
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6889232052267203e-05,
+ "loss": 0.883,
+ "step": 1457
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.688471348751186e-05,
+ "loss": 0.8001,
+ "step": 1458
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.688019224881313e-05,
+ "loss": 0.9631,
+ "step": 1459
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6875668337927014e-05,
+ "loss": 0.8921,
+ "step": 1460
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6871141756610544e-05,
+ "loss": 0.8152,
+ "step": 1461
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6866612506621788e-05,
+ "loss": 0.8897,
+ "step": 1462
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6862080589719863e-05,
+ "loss": 0.8766,
+ "step": 1463
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6857546007664908e-05,
+ "loss": 0.9122,
+ "step": 1464
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6853008762218103e-05,
+ "loss": 0.9693,
+ "step": 1465
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.684846885514166e-05,
+ "loss": 0.9323,
+ "step": 1466
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6843926288198828e-05,
+ "loss": 0.3472,
+ "step": 1467
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.683938106315389e-05,
+ "loss": 0.8746,
+ "step": 1468
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.683483318177216e-05,
+ "loss": 0.3501,
+ "step": 1469
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6830282645819974e-05,
+ "loss": 0.9011,
+ "step": 1470
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6825729457064718e-05,
+ "loss": 0.9537,
+ "step": 1471
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6821173617274793e-05,
+ "loss": 0.8521,
+ "step": 1472
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6816615128219635e-05,
+ "loss": 0.8628,
+ "step": 1473
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.681205399166971e-05,
+ "loss": 0.876,
+ "step": 1474
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6807490209396506e-05,
+ "loss": 0.8959,
+ "step": 1475
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6802923783172553e-05,
+ "loss": 0.9094,
+ "step": 1476
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.679835471477139e-05,
+ "loss": 0.8905,
+ "step": 1477
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6793783005967593e-05,
+ "loss": 0.9005,
+ "step": 1478
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.678920865853676e-05,
+ "loss": 0.3728,
+ "step": 1479
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.678463167425552e-05,
+ "loss": 0.9065,
+ "step": 1480
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6780052054901512e-05,
+ "loss": 0.8991,
+ "step": 1481
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6775469802253416e-05,
+ "loss": 0.8868,
+ "step": 1482
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6770884918090923e-05,
+ "loss": 0.8659,
+ "step": 1483
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6766297404194745e-05,
+ "loss": 0.9002,
+ "step": 1484
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6761707262346624e-05,
+ "loss": 0.8773,
+ "step": 1485
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.675711449432932e-05,
+ "loss": 0.9197,
+ "step": 1486
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6752519101926606e-05,
+ "loss": 0.8743,
+ "step": 1487
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6747921086923284e-05,
+ "loss": 0.8821,
+ "step": 1488
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.674332045110517e-05,
+ "loss": 0.9071,
+ "step": 1489
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6738717196259092e-05,
+ "loss": 0.9039,
+ "step": 1490
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.673411132417291e-05,
+ "loss": 0.9037,
+ "step": 1491
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.672950283663548e-05,
+ "loss": 0.9522,
+ "step": 1492
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6724891735436697e-05,
+ "loss": 0.9196,
+ "step": 1493
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6720278022367453e-05,
+ "loss": 0.8583,
+ "step": 1494
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6715661699219664e-05,
+ "loss": 0.9125,
+ "step": 1495
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6711042767786257e-05,
+ "loss": 0.8885,
+ "step": 1496
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6706421229861168e-05,
+ "loss": 0.8732,
+ "step": 1497
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6701797087239354e-05,
+ "loss": 0.351,
+ "step": 1498
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6697170341716772e-05,
+ "loss": 0.8356,
+ "step": 1499
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6692540995090403e-05,
+ "loss": 0.9195,
+ "step": 1500
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.668790904915823e-05,
+ "loss": 0.8917,
+ "step": 1501
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6683274505719248e-05,
+ "loss": 0.8966,
+ "step": 1502
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6678637366573455e-05,
+ "loss": 0.9046,
+ "step": 1503
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.667399763352187e-05,
+ "loss": 0.9207,
+ "step": 1504
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.666935530836651e-05,
+ "loss": 0.9575,
+ "step": 1505
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6664710392910396e-05,
+ "loss": 0.815,
+ "step": 1506
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6660062888957564e-05,
+ "loss": 0.9449,
+ "step": 1507
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.665541279831305e-05,
+ "loss": 0.8779,
+ "step": 1508
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6650760122782898e-05,
+ "loss": 0.8118,
+ "step": 1509
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6646104864174147e-05,
+ "loss": 0.9139,
+ "step": 1510
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.664144702429485e-05,
+ "loss": 0.9026,
+ "step": 1511
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.663678660495406e-05,
+ "loss": 0.8231,
+ "step": 1512
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.663212360796183e-05,
+ "loss": 0.926,
+ "step": 1513
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.662745803512921e-05,
+ "loss": 0.9112,
+ "step": 1514
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.662278988826826e-05,
+ "loss": 0.9311,
+ "step": 1515
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6618119169192027e-05,
+ "loss": 0.9262,
+ "step": 1516
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.661344587971457e-05,
+ "loss": 0.985,
+ "step": 1517
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6608770021650945e-05,
+ "loss": 0.9365,
+ "step": 1518
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6604091596817193e-05,
+ "loss": 0.904,
+ "step": 1519
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6599410607030363e-05,
+ "loss": 0.9535,
+ "step": 1520
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6594727054108498e-05,
+ "loss": 0.9517,
+ "step": 1521
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.659004093987064e-05,
+ "loss": 0.9498,
+ "step": 1522
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6585352266136814e-05,
+ "loss": 0.9883,
+ "step": 1523
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6580661034728055e-05,
+ "loss": 0.8979,
+ "step": 1524
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6575967247466376e-05,
+ "loss": 0.8531,
+ "step": 1525
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.657127090617479e-05,
+ "loss": 0.8912,
+ "step": 1526
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.656657201267731e-05,
+ "loss": 0.9086,
+ "step": 1527
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6561870568798927e-05,
+ "loss": 0.9344,
+ "step": 1528
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.655716657636562e-05,
+ "loss": 0.8746,
+ "step": 1529
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6552460037204382e-05,
+ "loss": 0.9204,
+ "step": 1530
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6547750953143168e-05,
+ "loss": 0.8635,
+ "step": 1531
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.654303932601093e-05,
+ "loss": 0.921,
+ "step": 1532
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6538325157637614e-05,
+ "loss": 0.9079,
+ "step": 1533
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.653360844985415e-05,
+ "loss": 0.3378,
+ "step": 1534
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.652888920449245e-05,
+ "loss": 0.9322,
+ "step": 1535
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6524167423385414e-05,
+ "loss": 0.9674,
+ "step": 1536
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.651944310836693e-05,
+ "loss": 0.9316,
+ "step": 1537
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6514716261271866e-05,
+ "loss": 0.8693,
+ "step": 1538
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6509986883936073e-05,
+ "loss": 0.8552,
+ "step": 1539
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.650525497819639e-05,
+ "loss": 0.3403,
+ "step": 1540
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6500520545890634e-05,
+ "loss": 0.9273,
+ "step": 1541
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6495783588857605e-05,
+ "loss": 0.9329,
+ "step": 1542
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.649104410893708e-05,
+ "loss": 0.334,
+ "step": 1543
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.648630210796982e-05,
+ "loss": 0.8976,
+ "step": 1544
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6481557587797562e-05,
+ "loss": 0.9389,
+ "step": 1545
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6476810550263023e-05,
+ "loss": 0.9542,
+ "step": 1546
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6472060997209898e-05,
+ "loss": 0.8812,
+ "step": 1547
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6467308930482863e-05,
+ "loss": 0.9048,
+ "step": 1548
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6462554351927558e-05,
+ "loss": 0.876,
+ "step": 1549
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6457797263390613e-05,
+ "loss": 0.8298,
+ "step": 1550
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6453037666719624e-05,
+ "loss": 0.9022,
+ "step": 1551
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6448275563763162e-05,
+ "loss": 0.8829,
+ "step": 1552
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.644351095637078e-05,
+ "loss": 0.8775,
+ "step": 1553
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6438743846392987e-05,
+ "loss": 0.9378,
+ "step": 1554
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6433974235681274e-05,
+ "loss": 0.9564,
+ "step": 1555
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6429202126088112e-05,
+ "loss": 0.8495,
+ "step": 1556
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6424427519466925e-05,
+ "loss": 0.9205,
+ "step": 1557
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.641965041767212e-05,
+ "loss": 0.8454,
+ "step": 1558
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6414870822559064e-05,
+ "loss": 0.8951,
+ "step": 1559
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6410088735984103e-05,
+ "loss": 0.9077,
+ "step": 1560
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6405304159804534e-05,
+ "loss": 0.931,
+ "step": 1561
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6400517095878644e-05,
+ "loss": 0.9085,
+ "step": 1562
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6395727546065665e-05,
+ "loss": 0.9335,
+ "step": 1563
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6390935512225806e-05,
+ "loss": 0.9383,
+ "step": 1564
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6386140996220232e-05,
+ "loss": 0.9486,
+ "step": 1565
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6381343999911088e-05,
+ "loss": 0.9645,
+ "step": 1566
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6376544525161463e-05,
+ "loss": 0.942,
+ "step": 1567
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6371742573835426e-05,
+ "loss": 0.8543,
+ "step": 1568
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.636693814779799e-05,
+ "loss": 0.9479,
+ "step": 1569
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6362131248915145e-05,
+ "loss": 0.8565,
+ "step": 1570
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6357321879053833e-05,
+ "loss": 0.9377,
+ "step": 1571
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6352510040081962e-05,
+ "loss": 0.9424,
+ "step": 1572
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.634769573386839e-05,
+ "loss": 0.8644,
+ "step": 1573
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.634287896228294e-05,
+ "loss": 0.8406,
+ "step": 1574
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6338059727196386e-05,
+ "loss": 0.9085,
+ "step": 1575
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6333238030480473e-05,
+ "loss": 0.9058,
+ "step": 1576
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6328413874007884e-05,
+ "loss": 0.942,
+ "step": 1577
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6323587259652267e-05,
+ "loss": 0.8775,
+ "step": 1578
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6318758189288227e-05,
+ "loss": 0.9459,
+ "step": 1579
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6313926664791316e-05,
+ "loss": 0.9458,
+ "step": 1580
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6309092688038047e-05,
+ "loss": 0.9369,
+ "step": 1581
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6304256260905872e-05,
+ "loss": 0.917,
+ "step": 1582
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6299417385273216e-05,
+ "loss": 0.9081,
+ "step": 1583
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.629457606301943e-05,
+ "loss": 0.3765,
+ "step": 1584
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6289732296024837e-05,
+ "loss": 0.9892,
+ "step": 1585
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6284886086170697e-05,
+ "loss": 0.9082,
+ "step": 1586
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.628003743533922e-05,
+ "loss": 0.3439,
+ "step": 1587
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6275186345413566e-05,
+ "loss": 0.9259,
+ "step": 1588
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.627033281827785e-05,
+ "loss": 0.362,
+ "step": 1589
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6265476855817116e-05,
+ "loss": 0.8515,
+ "step": 1590
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6260618459917366e-05,
+ "loss": 0.9138,
+ "step": 1591
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6255757632465553e-05,
+ "loss": 0.9615,
+ "step": 1592
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.625089437534956e-05,
+ "loss": 0.9091,
+ "step": 1593
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.624602869045822e-05,
+ "loss": 0.9077,
+ "step": 1594
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.624116057968131e-05,
+ "loss": 0.9218,
+ "step": 1595
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6236290044909543e-05,
+ "loss": 1.0128,
+ "step": 1596
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6231417088034585e-05,
+ "loss": 0.9007,
+ "step": 1597
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.622654171094904e-05,
+ "loss": 0.3385,
+ "step": 1598
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6221663915546437e-05,
+ "loss": 0.9356,
+ "step": 1599
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6216783703721265e-05,
+ "loss": 0.9317,
+ "step": 1600
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6211901077368937e-05,
+ "loss": 0.8909,
+ "step": 1601
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.620701603838581e-05,
+ "loss": 0.9236,
+ "step": 1602
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6202128588669177e-05,
+ "loss": 0.8958,
+ "step": 1603
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.619723873011727e-05,
+ "loss": 0.866,
+ "step": 1604
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6192346464629247e-05,
+ "loss": 0.8925,
+ "step": 1605
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6187451794105212e-05,
+ "loss": 0.851,
+ "step": 1606
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.61825547204462e-05,
+ "loss": 0.879,
+ "step": 1607
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6177655245554177e-05,
+ "loss": 0.8873,
+ "step": 1608
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.617275337133204e-05,
+ "loss": 0.8709,
+ "step": 1609
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6167849099683623e-05,
+ "loss": 0.8851,
+ "step": 1610
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6162942432513687e-05,
+ "loss": 0.9548,
+ "step": 1611
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6158033371727924e-05,
+ "loss": 0.9119,
+ "step": 1612
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6153121919232962e-05,
+ "loss": 0.8921,
+ "step": 1613
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.614820807693635e-05,
+ "loss": 0.9396,
+ "step": 1614
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6143291846746563e-05,
+ "loss": 0.9238,
+ "step": 1615
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.613837323057301e-05,
+ "loss": 0.993,
+ "step": 1616
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6133452230326035e-05,
+ "loss": 0.8919,
+ "step": 1617
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6128528847916883e-05,
+ "loss": 0.8905,
+ "step": 1618
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6123603085257746e-05,
+ "loss": 0.9203,
+ "step": 1619
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6118674944261732e-05,
+ "loss": 0.8348,
+ "step": 1620
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6113744426842882e-05,
+ "loss": 0.8634,
+ "step": 1621
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6108811534916137e-05,
+ "loss": 0.9083,
+ "step": 1622
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6103876270397387e-05,
+ "loss": 0.9459,
+ "step": 1623
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.609893863520343e-05,
+ "loss": 0.8999,
+ "step": 1624
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.609399863125198e-05,
+ "loss": 0.9203,
+ "step": 1625
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6089056260461687e-05,
+ "loss": 0.8976,
+ "step": 1626
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6084111524752107e-05,
+ "loss": 0.888,
+ "step": 1627
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.607916442604372e-05,
+ "loss": 0.9126,
+ "step": 1628
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6074214966257914e-05,
+ "loss": 0.9084,
+ "step": 1629
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6069263147317015e-05,
+ "loss": 0.856,
+ "step": 1630
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6064308971144236e-05,
+ "loss": 0.9364,
+ "step": 1631
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.605935243966374e-05,
+ "loss": 0.8824,
+ "step": 1632
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6054393554800574e-05,
+ "loss": 0.8943,
+ "step": 1633
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.604943231848072e-05,
+ "loss": 0.9126,
+ "step": 1634
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.604446873263106e-05,
+ "loss": 0.9869,
+ "step": 1635
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6039502799179394e-05,
+ "loss": 0.9221,
+ "step": 1636
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6034534520054435e-05,
+ "loss": 0.9395,
+ "step": 1637
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.60295638971858e-05,
+ "loss": 0.9287,
+ "step": 1638
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.602459093250403e-05,
+ "loss": 0.946,
+ "step": 1639
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.601961562794056e-05,
+ "loss": 0.8648,
+ "step": 1640
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.601463798542775e-05,
+ "loss": 0.3325,
+ "step": 1641
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6009658006898848e-05,
+ "loss": 0.8383,
+ "step": 1642
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.600467569428803e-05,
+ "loss": 0.85,
+ "step": 1643
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.599969104953036e-05,
+ "loss": 0.9851,
+ "step": 1644
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.599470407456182e-05,
+ "loss": 0.8856,
+ "step": 1645
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5989714771319297e-05,
+ "loss": 0.8561,
+ "step": 1646
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5984723141740578e-05,
+ "loss": 0.91,
+ "step": 1647
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.597972918776435e-05,
+ "loss": 0.9533,
+ "step": 1648
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5974732911330208e-05,
+ "loss": 0.9079,
+ "step": 1649
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5969734314378654e-05,
+ "loss": 0.8686,
+ "step": 1650
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5964733398851078e-05,
+ "loss": 0.9388,
+ "step": 1651
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5959730166689783e-05,
+ "loss": 0.9022,
+ "step": 1652
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5954724619837966e-05,
+ "loss": 0.869,
+ "step": 1653
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5949716760239722e-05,
+ "loss": 0.9018,
+ "step": 1654
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5944706589840046e-05,
+ "loss": 0.8919,
+ "step": 1655
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5939694110584833e-05,
+ "loss": 0.9298,
+ "step": 1656
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.593467932442087e-05,
+ "loss": 0.8993,
+ "step": 1657
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5929662233295846e-05,
+ "loss": 0.8597,
+ "step": 1658
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5924642839158334e-05,
+ "loss": 0.9543,
+ "step": 1659
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.591962114395781e-05,
+ "loss": 0.8902,
+ "step": 1660
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5914597149644654e-05,
+ "loss": 0.9517,
+ "step": 1661
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5909570858170115e-05,
+ "loss": 0.8964,
+ "step": 1662
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5904542271486346e-05,
+ "loss": 0.8226,
+ "step": 1663
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5899511391546403e-05,
+ "loss": 0.9308,
+ "step": 1664
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5894478220304215e-05,
+ "loss": 0.8725,
+ "step": 1665
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5889442759714603e-05,
+ "loss": 0.357,
+ "step": 1666
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5884405011733294e-05,
+ "loss": 0.8884,
+ "step": 1667
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.587936497831688e-05,
+ "loss": 0.8428,
+ "step": 1668
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5874322661422856e-05,
+ "loss": 0.8942,
+ "step": 1669
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5869278063009602e-05,
+ "loss": 0.9476,
+ "step": 1670
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.586423118503638e-05,
+ "loss": 0.8257,
+ "step": 1671
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.585918202946334e-05,
+ "loss": 0.9185,
+ "step": 1672
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5854130598251514e-05,
+ "loss": 0.9394,
+ "step": 1673
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5849076893362822e-05,
+ "loss": 0.9399,
+ "step": 1674
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.584402091676006e-05,
+ "loss": 0.9126,
+ "step": 1675
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5838962670406918e-05,
+ "loss": 0.9149,
+ "step": 1676
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5833902156267956e-05,
+ "loss": 0.938,
+ "step": 1677
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.582883937630862e-05,
+ "loss": 0.9592,
+ "step": 1678
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5823774332495236e-05,
+ "loss": 0.9069,
+ "step": 1679
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581870702679501e-05,
+ "loss": 0.918,
+ "step": 1680
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581363746117602e-05,
+ "loss": 0.8892,
+ "step": 1681
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.580856563760724e-05,
+ "loss": 0.349,
+ "step": 1682
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5803491558058486e-05,
+ "loss": 0.9282,
+ "step": 1683
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.579841522450049e-05,
+ "loss": 0.8366,
+ "step": 1684
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5793336638904838e-05,
+ "loss": 0.9711,
+ "step": 1685
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.578825580324399e-05,
+ "loss": 0.9039,
+ "step": 1686
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5783172719491288e-05,
+ "loss": 0.8891,
+ "step": 1687
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.577808738962094e-05,
+ "loss": 0.843,
+ "step": 1688
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.577299981560803e-05,
+ "loss": 0.3497,
+ "step": 1689
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5767909999428513e-05,
+ "loss": 0.9345,
+ "step": 1690
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.576281794305922e-05,
+ "loss": 0.93,
+ "step": 1691
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.575772364847784e-05,
+ "loss": 0.9059,
+ "step": 1692
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.575262711766294e-05,
+ "loss": 0.8892,
+ "step": 1693
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5747528352593956e-05,
+ "loss": 0.8448,
+ "step": 1694
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.574242735525119e-05,
+ "loss": 0.8748,
+ "step": 1695
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5737324127615808e-05,
+ "loss": 0.9469,
+ "step": 1696
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5732218671669847e-05,
+ "loss": 0.9469,
+ "step": 1697
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5727110989396205e-05,
+ "loss": 0.8397,
+ "step": 1698
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5722001082778645e-05,
+ "loss": 0.982,
+ "step": 1699
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5716888953801805e-05,
+ "loss": 0.9181,
+ "step": 1700
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5711774604451168e-05,
+ "loss": 0.85,
+ "step": 1701
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5706658036713093e-05,
+ "loss": 0.3376,
+ "step": 1702
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5701539252574795e-05,
+ "loss": 0.362,
+ "step": 1703
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5696418254024344e-05,
+ "loss": 0.8803,
+ "step": 1704
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.569129504305069e-05,
+ "loss": 0.904,
+ "step": 1705
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.568616962164362e-05,
+ "loss": 0.8772,
+ "step": 1706
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5681041991793788e-05,
+ "loss": 0.8893,
+ "step": 1707
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.567591215549271e-05,
+ "loss": 0.9031,
+ "step": 1708
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.567078011473276e-05,
+ "loss": 0.868,
+ "step": 1709
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5665645871507152e-05,
+ "loss": 0.841,
+ "step": 1710
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5660509427809973e-05,
+ "loss": 0.8552,
+ "step": 1711
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.565537078563616e-05,
+ "loss": 0.8433,
+ "step": 1712
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.56502299469815e-05,
+ "loss": 0.8928,
+ "step": 1713
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.564508691384264e-05,
+ "loss": 0.9244,
+ "step": 1714
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5639941688217063e-05,
+ "loss": 0.8972,
+ "step": 1715
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5634794272103126e-05,
+ "loss": 0.8691,
+ "step": 1716
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.562964466750003e-05,
+ "loss": 0.898,
+ "step": 1717
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.562449287640781e-05,
+ "loss": 0.9465,
+ "step": 1718
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5619338900827368e-05,
+ "loss": 0.8966,
+ "step": 1719
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5614182742760448e-05,
+ "loss": 0.9137,
+ "step": 1720
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5609024404209643e-05,
+ "loss": 0.9303,
+ "step": 1721
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5603863887178393e-05,
+ "loss": 0.8798,
+ "step": 1722
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5598701193670983e-05,
+ "loss": 0.9252,
+ "step": 1723
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.559353632569254e-05,
+ "loss": 0.8918,
+ "step": 1724
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5588369285249048e-05,
+ "loss": 0.8562,
+ "step": 1725
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5583200074347318e-05,
+ "loss": 0.9207,
+ "step": 1726
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.557802869499501e-05,
+ "loss": 0.8755,
+ "step": 1727
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5572855149200637e-05,
+ "loss": 0.9165,
+ "step": 1728
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5567679438973543e-05,
+ "loss": 0.9501,
+ "step": 1729
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5562501566323906e-05,
+ "loss": 0.9016,
+ "step": 1730
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.555732153326276e-05,
+ "loss": 0.9402,
+ "step": 1731
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5552139341801965e-05,
+ "loss": 0.8856,
+ "step": 1732
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.554695499395423e-05,
+ "loss": 0.8768,
+ "step": 1733
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5541768491733092e-05,
+ "loss": 0.9014,
+ "step": 1734
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5536579837152927e-05,
+ "loss": 0.8418,
+ "step": 1735
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5531389032228955e-05,
+ "loss": 0.8946,
+ "step": 1736
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.552619607897722e-05,
+ "loss": 0.9021,
+ "step": 1737
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.55210009794146e-05,
+ "loss": 0.8611,
+ "step": 1738
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5515803735558827e-05,
+ "loss": 0.9054,
+ "step": 1739
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5510604349428438e-05,
+ "loss": 0.9597,
+ "step": 1740
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.550540282304282e-05,
+ "loss": 0.8706,
+ "step": 1741
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.550019915842218e-05,
+ "loss": 0.9222,
+ "step": 1742
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.549499335758757e-05,
+ "loss": 0.9049,
+ "step": 1743
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.548978542256086e-05,
+ "loss": 0.9216,
+ "step": 1744
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5484575355364744e-05,
+ "loss": 0.906,
+ "step": 1745
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5479363158022763e-05,
+ "loss": 0.8817,
+ "step": 1746
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.547414883255927e-05,
+ "loss": 0.8898,
+ "step": 1747
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.546893238099945e-05,
+ "loss": 0.8456,
+ "step": 1748
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5463713805369312e-05,
+ "loss": 0.8614,
+ "step": 1749
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5458493107695688e-05,
+ "loss": 0.8622,
+ "step": 1750
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5453270290006237e-05,
+ "loss": 0.8739,
+ "step": 1751
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.544804535432945e-05,
+ "loss": 0.9129,
+ "step": 1752
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.544281830269462e-05,
+ "loss": 0.8913,
+ "step": 1753
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5437589137131882e-05,
+ "loss": 0.344,
+ "step": 1754
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5432357859672177e-05,
+ "loss": 0.3343,
+ "step": 1755
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.542712447234728e-05,
+ "loss": 0.9442,
+ "step": 1756
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.542188897718977e-05,
+ "loss": 0.8802,
+ "step": 1757
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5416651376233062e-05,
+ "loss": 0.9224,
+ "step": 1758
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5411411671511376e-05,
+ "loss": 0.3322,
+ "step": 1759
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5406169865059747e-05,
+ "loss": 0.8669,
+ "step": 1760
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5400925958914045e-05,
+ "loss": 0.8587,
+ "step": 1761
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5395679955110927e-05,
+ "loss": 0.9005,
+ "step": 1762
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.53904318556879e-05,
+ "loss": 0.894,
+ "step": 1763
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5385181662683244e-05,
+ "loss": 0.9441,
+ "step": 1764
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5379929378136088e-05,
+ "loss": 0.8866,
+ "step": 1765
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5374675004086353e-05,
+ "loss": 0.8596,
+ "step": 1766
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5369418542574782e-05,
+ "loss": 0.9463,
+ "step": 1767
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.536415999564292e-05,
+ "loss": 0.9096,
+ "step": 1768
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5358899365333123e-05,
+ "loss": 0.9065,
+ "step": 1769
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5353636653688563e-05,
+ "loss": 0.9263,
+ "step": 1770
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.534837186275322e-05,
+ "loss": 0.9296,
+ "step": 1771
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5343104994571877e-05,
+ "loss": 0.9103,
+ "step": 1772
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.533783605119012e-05,
+ "loss": 0.9263,
+ "step": 1773
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5332565034654344e-05,
+ "loss": 0.8902,
+ "step": 1774
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5327291947011763e-05,
+ "loss": 0.8487,
+ "step": 1775
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5322016790310373e-05,
+ "loss": 0.9495,
+ "step": 1776
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5316739566598985e-05,
+ "loss": 0.8295,
+ "step": 1777
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.531146027792722e-05,
+ "loss": 0.8741,
+ "step": 1778
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.530617892634548e-05,
+ "loss": 0.879,
+ "step": 1779
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5300895513904993e-05,
+ "loss": 0.8716,
+ "step": 1780
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.529561004265777e-05,
+ "loss": 0.3501,
+ "step": 1781
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5290322514656624e-05,
+ "loss": 0.9305,
+ "step": 1782
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5285032931955177e-05,
+ "loss": 0.8902,
+ "step": 1783
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.527974129660784e-05,
+ "loss": 0.8797,
+ "step": 1784
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.527444761066982e-05,
+ "loss": 0.9201,
+ "step": 1785
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5269151876197127e-05,
+ "loss": 0.8549,
+ "step": 1786
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5263854095246557e-05,
+ "loss": 0.9351,
+ "step": 1787
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5258554269875716e-05,
+ "loss": 0.9333,
+ "step": 1788
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5253252402142989e-05,
+ "loss": 0.914,
+ "step": 1789
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5247948494107566e-05,
+ "loss": 0.9352,
+ "step": 1790
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5242642547829416e-05,
+ "loss": 0.9375,
+ "step": 1791
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.523733456536931e-05,
+ "loss": 0.8714,
+ "step": 1792
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5232024548788813e-05,
+ "loss": 0.9665,
+ "step": 1793
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5226712500150267e-05,
+ "loss": 0.8637,
+ "step": 1794
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5221398421516816e-05,
+ "loss": 0.9001,
+ "step": 1795
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5216082314952383e-05,
+ "loss": 0.8326,
+ "step": 1796
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.521076418252168e-05,
+ "loss": 0.9145,
+ "step": 1797
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5205444026290218e-05,
+ "loss": 0.9409,
+ "step": 1798
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5200121848324276e-05,
+ "loss": 0.899,
+ "step": 1799
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5194797650690926e-05,
+ "loss": 0.893,
+ "step": 1800
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5189471435458032e-05,
+ "loss": 0.9221,
+ "step": 1801
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5184143204694231e-05,
+ "loss": 0.7968,
+ "step": 1802
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5178812960468945e-05,
+ "loss": 0.9386,
+ "step": 1803
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5173480704852379e-05,
+ "loss": 0.8234,
+ "step": 1804
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5168146439915525e-05,
+ "loss": 0.8813,
+ "step": 1805
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5162810167730144e-05,
+ "loss": 0.8989,
+ "step": 1806
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5157471890368785e-05,
+ "loss": 0.9244,
+ "step": 1807
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5152131609904773e-05,
+ "loss": 0.9251,
+ "step": 1808
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5146789328412213e-05,
+ "loss": 0.8993,
+ "step": 1809
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5141445047965984e-05,
+ "loss": 0.342,
+ "step": 1810
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5136098770641741e-05,
+ "loss": 0.9025,
+ "step": 1811
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.513075049851592e-05,
+ "loss": 0.3408,
+ "step": 1812
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5125400233665728e-05,
+ "loss": 0.8834,
+ "step": 1813
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5120047978169146e-05,
+ "loss": 0.8897,
+ "step": 1814
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5114693734104926e-05,
+ "loss": 0.8895,
+ "step": 1815
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5109337503552594e-05,
+ "loss": 0.9055,
+ "step": 1816
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5103979288592454e-05,
+ "loss": 0.8434,
+ "step": 1817
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5098619091305571e-05,
+ "loss": 0.9234,
+ "step": 1818
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5093256913773786e-05,
+ "loss": 0.7853,
+ "step": 1819
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.50878927580797e-05,
+ "loss": 0.9126,
+ "step": 1820
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5082526626306698e-05,
+ "loss": 0.3308,
+ "step": 1821
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5077158520538921e-05,
+ "loss": 0.821,
+ "step": 1822
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5071788442861277e-05,
+ "loss": 0.8598,
+ "step": 1823
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5066416395359444e-05,
+ "loss": 0.8984,
+ "step": 1824
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5061042380119864e-05,
+ "loss": 0.8945,
+ "step": 1825
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5055666399229743e-05,
+ "loss": 0.9365,
+ "step": 1826
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5050288454777047e-05,
+ "loss": 0.3325,
+ "step": 1827
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.504490854885051e-05,
+ "loss": 0.9344,
+ "step": 1828
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5039526683539627e-05,
+ "loss": 0.961,
+ "step": 1829
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5034142860934649e-05,
+ "loss": 0.8653,
+ "step": 1830
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5028757083126594e-05,
+ "loss": 0.8737,
+ "step": 1831
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5023369352207229e-05,
+ "loss": 0.8489,
+ "step": 1832
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5017979670269096e-05,
+ "loss": 0.8895,
+ "step": 1833
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.501258803940548e-05,
+ "loss": 0.9791,
+ "step": 1834
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.500719446171043e-05,
+ "loss": 0.853,
+ "step": 1835
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.500179893927875e-05,
+ "loss": 0.8926,
+ "step": 1836
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4996401474205997e-05,
+ "loss": 0.3301,
+ "step": 1837
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4991002068588484e-05,
+ "loss": 0.9411,
+ "step": 1838
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4985600724523282e-05,
+ "loss": 0.9024,
+ "step": 1839
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4980197444108205e-05,
+ "loss": 0.9483,
+ "step": 1840
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4974792229441826e-05,
+ "loss": 0.9167,
+ "step": 1841
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4969385082623473e-05,
+ "loss": 0.9055,
+ "step": 1842
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4963976005753216e-05,
+ "loss": 0.8377,
+ "step": 1843
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4958565000931877e-05,
+ "loss": 0.9333,
+ "step": 1844
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4953152070261027e-05,
+ "loss": 0.8977,
+ "step": 1845
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.494773721584299e-05,
+ "loss": 0.9427,
+ "step": 1846
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4942320439780833e-05,
+ "loss": 0.8719,
+ "step": 1847
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4936901744178367e-05,
+ "loss": 0.8691,
+ "step": 1848
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4931481131140149e-05,
+ "loss": 0.8912,
+ "step": 1849
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4926058602771484e-05,
+ "loss": 0.356,
+ "step": 1850
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4920634161178424e-05,
+ "loss": 0.8958,
+ "step": 1851
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4915207808467756e-05,
+ "loss": 0.8454,
+ "step": 1852
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4909779546747011e-05,
+ "loss": 0.9246,
+ "step": 1853
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4904349378124467e-05,
+ "loss": 0.8342,
+ "step": 1854
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.489891730470914e-05,
+ "loss": 0.913,
+ "step": 1855
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4893483328610778e-05,
+ "loss": 0.8311,
+ "step": 1856
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.488804745193988e-05,
+ "loss": 0.8267,
+ "step": 1857
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4882609676807675e-05,
+ "loss": 0.8444,
+ "step": 1858
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4877170005326136e-05,
+ "loss": 0.8312,
+ "step": 1859
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4871728439607967e-05,
+ "loss": 0.9051,
+ "step": 1860
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4866284981766607e-05,
+ "loss": 0.9088,
+ "step": 1861
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4860839633916236e-05,
+ "loss": 0.8904,
+ "step": 1862
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4855392398171762e-05,
+ "loss": 0.9397,
+ "step": 1863
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.484994327664883e-05,
+ "loss": 0.8327,
+ "step": 1864
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4844492271463814e-05,
+ "loss": 0.9035,
+ "step": 1865
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4839039384733821e-05,
+ "loss": 0.8804,
+ "step": 1866
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4833584618576695e-05,
+ "loss": 0.8932,
+ "step": 1867
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4828127975111e-05,
+ "loss": 0.9195,
+ "step": 1868
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4822669456456031e-05,
+ "loss": 0.8813,
+ "step": 1869
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4817209064731819e-05,
+ "loss": 0.9215,
+ "step": 1870
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4811746802059115e-05,
+ "loss": 0.9449,
+ "step": 1871
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.48062826705594e-05,
+ "loss": 0.8442,
+ "step": 1872
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4800816672354876e-05,
+ "loss": 0.8994,
+ "step": 1873
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4795348809568477e-05,
+ "loss": 0.8873,
+ "step": 1874
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4789879084323858e-05,
+ "loss": 0.8457,
+ "step": 1875
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4784407498745394e-05,
+ "loss": 0.9257,
+ "step": 1876
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.477893405495819e-05,
+ "loss": 0.9089,
+ "step": 1877
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4773458755088068e-05,
+ "loss": 0.9125,
+ "step": 1878
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4767981601261567e-05,
+ "loss": 0.9097,
+ "step": 1879
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4762502595605957e-05,
+ "loss": 0.877,
+ "step": 1880
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4757021740249213e-05,
+ "loss": 0.8929,
+ "step": 1881
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4751539037320044e-05,
+ "loss": 0.877,
+ "step": 1882
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4746054488947863e-05,
+ "loss": 0.9186,
+ "step": 1883
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4740568097262811e-05,
+ "loss": 0.9156,
+ "step": 1884
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.473507986439573e-05,
+ "loss": 0.8942,
+ "step": 1885
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4729589792478193e-05,
+ "loss": 0.9098,
+ "step": 1886
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4724097883642482e-05,
+ "loss": 0.9079,
+ "step": 1887
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4718604140021588e-05,
+ "loss": 0.8696,
+ "step": 1888
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.471310856374922e-05,
+ "loss": 0.8806,
+ "step": 1889
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.470761115695979e-05,
+ "loss": 0.8841,
+ "step": 1890
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4702111921788437e-05,
+ "loss": 0.3691,
+ "step": 1891
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4696610860370997e-05,
+ "loss": 0.8541,
+ "step": 1892
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4691107974844015e-05,
+ "loss": 0.8623,
+ "step": 1893
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.468560326734475e-05,
+ "loss": 0.9252,
+ "step": 1894
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4680096740011172e-05,
+ "loss": 0.9047,
+ "step": 1895
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4674588394981948e-05,
+ "loss": 0.8317,
+ "step": 1896
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4669078234396454e-05,
+ "loss": 0.8868,
+ "step": 1897
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4663566260394775e-05,
+ "loss": 0.9246,
+ "step": 1898
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4658052475117704e-05,
+ "loss": 0.9418,
+ "step": 1899
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4652536880706723e-05,
+ "loss": 0.977,
+ "step": 1900
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4647019479304028e-05,
+ "loss": 0.935,
+ "step": 1901
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4641500273052516e-05,
+ "loss": 0.881,
+ "step": 1902
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.463597926409578e-05,
+ "loss": 0.8564,
+ "step": 1903
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4630456454578122e-05,
+ "loss": 0.9335,
+ "step": 1904
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.462493184664453e-05,
+ "loss": 0.8867,
+ "step": 1905
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4619405442440702e-05,
+ "loss": 0.8895,
+ "step": 1906
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4613877244113033e-05,
+ "loss": 0.8933,
+ "step": 1907
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4608347253808605e-05,
+ "loss": 0.3472,
+ "step": 1908
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.460281547367521e-05,
+ "loss": 0.9395,
+ "step": 1909
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4597281905861318e-05,
+ "loss": 0.9227,
+ "step": 1910
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4591746552516109e-05,
+ "loss": 0.3281,
+ "step": 1911
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4586209415789452e-05,
+ "loss": 0.8451,
+ "step": 1912
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4580670497831904e-05,
+ "loss": 0.9553,
+ "step": 1913
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4575129800794718e-05,
+ "loss": 0.9058,
+ "step": 1914
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4569587326829834e-05,
+ "loss": 0.9239,
+ "step": 1915
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4564043078089891e-05,
+ "loss": 0.8449,
+ "step": 1916
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4558497056728205e-05,
+ "loss": 0.9244,
+ "step": 1917
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4552949264898795e-05,
+ "loss": 0.8445,
+ "step": 1918
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4547399704756348e-05,
+ "loss": 0.8401,
+ "step": 1919
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4541848378456255e-05,
+ "loss": 0.8877,
+ "step": 1920
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4536295288154594e-05,
+ "loss": 0.9163,
+ "step": 1921
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4530740436008111e-05,
+ "loss": 0.8836,
+ "step": 1922
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.452518382417425e-05,
+ "loss": 0.9343,
+ "step": 1923
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4519625454811135e-05,
+ "loss": 0.8714,
+ "step": 1924
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4514065330077575e-05,
+ "loss": 0.9157,
+ "step": 1925
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4508503452133053e-05,
+ "loss": 0.8121,
+ "step": 1926
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4502939823137744e-05,
+ "loss": 0.9604,
+ "step": 1927
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4497374445252496e-05,
+ "loss": 0.8782,
+ "step": 1928
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4491807320638835e-05,
+ "loss": 0.9134,
+ "step": 1929
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4486238451458972e-05,
+ "loss": 0.8633,
+ "step": 1930
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4480667839875786e-05,
+ "loss": 0.9408,
+ "step": 1931
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4475095488052843e-05,
+ "loss": 0.8734,
+ "step": 1932
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4469521398154381e-05,
+ "loss": 0.927,
+ "step": 1933
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4463945572345308e-05,
+ "loss": 0.898,
+ "step": 1934
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4458368012791213e-05,
+ "loss": 0.3371,
+ "step": 1935
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4452788721658355e-05,
+ "loss": 0.9782,
+ "step": 1936
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4447207701113669e-05,
+ "loss": 0.8966,
+ "step": 1937
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4441624953324755e-05,
+ "loss": 0.9522,
+ "step": 1938
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4436040480459891e-05,
+ "loss": 0.9068,
+ "step": 1939
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.443045428468802e-05,
+ "loss": 0.8675,
+ "step": 1940
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4424866368178761e-05,
+ "loss": 0.8399,
+ "step": 1941
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.441927673310239e-05,
+ "loss": 0.8841,
+ "step": 1942
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4413685381629855e-05,
+ "loss": 0.9024,
+ "step": 1943
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.440809231593278e-05,
+ "loss": 0.8847,
+ "step": 1944
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4402497538183444e-05,
+ "loss": 0.8777,
+ "step": 1945
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4396901050554794e-05,
+ "loss": 0.9327,
+ "step": 1946
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4391302855220442e-05,
+ "loss": 0.8859,
+ "step": 1947
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4385702954354662e-05,
+ "loss": 0.8798,
+ "step": 1948
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.438010135013239e-05,
+ "loss": 0.9352,
+ "step": 1949
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4374498044729225e-05,
+ "loss": 0.3367,
+ "step": 1950
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4368893040321428e-05,
+ "loss": 0.9483,
+ "step": 1951
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4363286339085915e-05,
+ "loss": 0.8883,
+ "step": 1952
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.435767794320027e-05,
+ "loss": 0.9052,
+ "step": 1953
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4352067854842724e-05,
+ "loss": 0.8696,
+ "step": 1954
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.434645607619217e-05,
+ "loss": 0.9393,
+ "step": 1955
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.434084260942816e-05,
+ "loss": 0.9083,
+ "step": 1956
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4335227456730902e-05,
+ "loss": 0.3333,
+ "step": 1957
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4329610620281253e-05,
+ "loss": 1.0002,
+ "step": 1958
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4323992102260733e-05,
+ "loss": 0.8595,
+ "step": 1959
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4318371904851502e-05,
+ "loss": 0.8919,
+ "step": 1960
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4312750030236382e-05,
+ "loss": 0.8652,
+ "step": 1961
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4307126480598852e-05,
+ "loss": 0.8431,
+ "step": 1962
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4301501258123024e-05,
+ "loss": 0.9213,
+ "step": 1963
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4295874364993672e-05,
+ "loss": 0.9258,
+ "step": 1964
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4290245803396221e-05,
+ "loss": 0.9051,
+ "step": 1965
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4284615575516737e-05,
+ "loss": 0.8925,
+ "step": 1966
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4278983683541934e-05,
+ "loss": 0.3409,
+ "step": 1967
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4273350129659173e-05,
+ "loss": 0.872,
+ "step": 1968
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4267714916056465e-05,
+ "loss": 0.8765,
+ "step": 1969
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.426207804492246e-05,
+ "loss": 0.8701,
+ "step": 1970
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4256439518446456e-05,
+ "loss": 0.3227,
+ "step": 1971
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4250799338818388e-05,
+ "loss": 0.8665,
+ "step": 1972
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.424515750822884e-05,
+ "loss": 0.8541,
+ "step": 1973
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4239514028869032e-05,
+ "loss": 0.9197,
+ "step": 1974
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4233868902930827e-05,
+ "loss": 0.8156,
+ "step": 1975
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4228222132606729e-05,
+ "loss": 0.8795,
+ "step": 1976
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4222573720089874e-05,
+ "loss": 0.9272,
+ "step": 1977
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4216923667574042e-05,
+ "loss": 0.8261,
+ "step": 1978
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4211271977253653e-05,
+ "loss": 0.9354,
+ "step": 1979
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4205618651323753e-05,
+ "loss": 0.9077,
+ "step": 1980
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4199963691980027e-05,
+ "loss": 0.8562,
+ "step": 1981
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4194307101418805e-05,
+ "loss": 0.917,
+ "step": 1982
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4188648881837033e-05,
+ "loss": 0.919,
+ "step": 1983
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4182989035432299e-05,
+ "loss": 0.8722,
+ "step": 1984
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4177327564402825e-05,
+ "loss": 0.8983,
+ "step": 1985
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4171664470947464e-05,
+ "loss": 0.9448,
+ "step": 1986
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.416599975726569e-05,
+ "loss": 0.9818,
+ "step": 1987
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4160333425557616e-05,
+ "loss": 0.8398,
+ "step": 1988
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4154665478023977e-05,
+ "loss": 0.8986,
+ "step": 1989
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4148995916866139e-05,
+ "loss": 0.8588,
+ "step": 1990
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.41433247442861e-05,
+ "loss": 0.81,
+ "step": 1991
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4137651962486472e-05,
+ "loss": 0.3643,
+ "step": 1992
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4131977573670499e-05,
+ "loss": 0.931,
+ "step": 1993
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.412630158004205e-05,
+ "loss": 0.9019,
+ "step": 1994
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4120623983805617e-05,
+ "loss": 0.9074,
+ "step": 1995
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4114944787166307e-05,
+ "loss": 0.9205,
+ "step": 1996
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4109263992329858e-05,
+ "loss": 0.9037,
+ "step": 1997
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4103581601502629e-05,
+ "loss": 0.3415,
+ "step": 1998
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.409789761689159e-05,
+ "loss": 0.8838,
+ "step": 1999
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4092212040704336e-05,
+ "loss": 0.8955,
+ "step": 2000
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.408652487514908e-05,
+ "loss": 0.9042,
+ "step": 2001
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.408083612243465e-05,
+ "loss": 0.3112,
+ "step": 2002
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4075145784770496e-05,
+ "loss": 0.9066,
+ "step": 2003
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4069453864366678e-05,
+ "loss": 0.9343,
+ "step": 2004
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4063760363433867e-05,
+ "loss": 0.3589,
+ "step": 2005
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.405806528418336e-05,
+ "loss": 0.8626,
+ "step": 2006
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4052368628827057e-05,
+ "loss": 0.9043,
+ "step": 2007
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4046670399577478e-05,
+ "loss": 0.9,
+ "step": 2008
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4040970598647742e-05,
+ "loss": 0.344,
+ "step": 2009
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4035269228251589e-05,
+ "loss": 0.8871,
+ "step": 2010
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4029566290603368e-05,
+ "loss": 0.8177,
+ "step": 2011
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4023861787918031e-05,
+ "loss": 0.9115,
+ "step": 2012
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4018155722411144e-05,
+ "loss": 0.8899,
+ "step": 2013
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4012448096298874e-05,
+ "loss": 0.9085,
+ "step": 2014
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4006738911798001e-05,
+ "loss": 0.8505,
+ "step": 2015
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.40010281711259e-05,
+ "loss": 0.8961,
+ "step": 2016
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3995315876500565e-05,
+ "loss": 0.9091,
+ "step": 2017
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3989602030140581e-05,
+ "loss": 0.9234,
+ "step": 2018
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.398388663426514e-05,
+ "loss": 0.8828,
+ "step": 2019
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3978169691094037e-05,
+ "loss": 0.8319,
+ "step": 2020
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3972451202847665e-05,
+ "loss": 0.9261,
+ "step": 2021
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3966731171747024e-05,
+ "loss": 0.8725,
+ "step": 2022
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3961009600013702e-05,
+ "loss": 0.8844,
+ "step": 2023
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3955286489869894e-05,
+ "loss": 0.8844,
+ "step": 2024
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.394956184353839e-05,
+ "loss": 0.8217,
+ "step": 2025
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3943835663242577e-05,
+ "loss": 0.9053,
+ "step": 2026
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3938107951206438e-05,
+ "loss": 0.8662,
+ "step": 2027
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3932378709654548e-05,
+ "loss": 0.8742,
+ "step": 2028
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3926647940812081e-05,
+ "loss": 0.889,
+ "step": 2029
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.39209156469048e-05,
+ "loss": 0.8695,
+ "step": 2030
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3915181830159061e-05,
+ "loss": 0.9434,
+ "step": 2031
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3909446492801819e-05,
+ "loss": 0.8806,
+ "step": 2032
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3903709637060605e-05,
+ "loss": 0.9066,
+ "step": 2033
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3897971265163546e-05,
+ "loss": 0.8069,
+ "step": 2034
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3892231379339369e-05,
+ "loss": 0.8629,
+ "step": 2035
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3886489981817375e-05,
+ "loss": 0.9743,
+ "step": 2036
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3880747074827454e-05,
+ "loss": 0.8458,
+ "step": 2037
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3875002660600085e-05,
+ "loss": 0.8798,
+ "step": 2038
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.386925674136634e-05,
+ "loss": 0.3686,
+ "step": 2039
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3863509319357857e-05,
+ "loss": 0.9186,
+ "step": 2040
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3857760396806876e-05,
+ "loss": 0.2869,
+ "step": 2041
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3852009975946209e-05,
+ "loss": 0.9297,
+ "step": 2042
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3846258059009252e-05,
+ "loss": 0.8997,
+ "step": 2043
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.384050464822999e-05,
+ "loss": 0.9275,
+ "step": 2044
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.383474974584297e-05,
+ "loss": 0.8687,
+ "step": 2045
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3828993354083342e-05,
+ "loss": 0.8515,
+ "step": 2046
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3823235475186816e-05,
+ "loss": 0.9319,
+ "step": 2047
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3817476111389685e-05,
+ "loss": 0.8814,
+ "step": 2048
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3811715264928824e-05,
+ "loss": 0.9526,
+ "step": 2049
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3805952938041674e-05,
+ "loss": 0.9228,
+ "step": 2050
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3800189132966257e-05,
+ "loss": 0.921,
+ "step": 2051
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3794423851941174e-05,
+ "loss": 0.8548,
+ "step": 2052
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.378865709720559e-05,
+ "loss": 0.359,
+ "step": 2053
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3782888870999245e-05,
+ "loss": 0.879,
+ "step": 2054
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.377711917556245e-05,
+ "loss": 0.8598,
+ "step": 2055
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3771348013136096e-05,
+ "loss": 0.9131,
+ "step": 2056
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3765575385961627e-05,
+ "loss": 0.8867,
+ "step": 2057
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3759801296281072e-05,
+ "loss": 0.3295,
+ "step": 2058
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3754025746337014e-05,
+ "loss": 0.8873,
+ "step": 2059
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3748248738372616e-05,
+ "loss": 0.893,
+ "step": 2060
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3742470274631599e-05,
+ "loss": 0.9075,
+ "step": 2061
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3736690357358253e-05,
+ "loss": 0.9009,
+ "step": 2062
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3730908988797427e-05,
+ "loss": 0.8598,
+ "step": 2063
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3725126171194543e-05,
+ "loss": 0.8945,
+ "step": 2064
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.371934190679558e-05,
+ "loss": 0.9255,
+ "step": 2065
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3713556197847076e-05,
+ "loss": 0.8482,
+ "step": 2066
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3707769046596136e-05,
+ "loss": 0.8548,
+ "step": 2067
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3701980455290425e-05,
+ "loss": 0.8895,
+ "step": 2068
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3696190426178162e-05,
+ "loss": 0.8559,
+ "step": 2069
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3690398961508128e-05,
+ "loss": 0.3352,
+ "step": 2070
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3684606063529662e-05,
+ "loss": 0.3463,
+ "step": 2071
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3678811734492659e-05,
+ "loss": 0.9215,
+ "step": 2072
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.367301597664757e-05,
+ "loss": 0.8368,
+ "step": 2073
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.36672187922454e-05,
+ "loss": 0.9162,
+ "step": 2074
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3661420183537705e-05,
+ "loss": 0.9218,
+ "step": 2075
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3655620152776605e-05,
+ "loss": 0.8654,
+ "step": 2076
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.364981870221476e-05,
+ "loss": 0.8559,
+ "step": 2077
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.364401583410539e-05,
+ "loss": 0.8737,
+ "step": 2078
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3638211550702256e-05,
+ "loss": 0.9215,
+ "step": 2079
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.363240585425968e-05,
+ "loss": 0.875,
+ "step": 2080
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.362659874703253e-05,
+ "loss": 0.8382,
+ "step": 2081
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3620790231276213e-05,
+ "loss": 0.9178,
+ "step": 2082
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3614980309246692e-05,
+ "loss": 0.841,
+ "step": 2083
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3609168983200474e-05,
+ "loss": 0.9028,
+ "step": 2084
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3603356255394613e-05,
+ "loss": 0.8932,
+ "step": 2085
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3597542128086702e-05,
+ "loss": 0.9153,
+ "step": 2086
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3591726603534885e-05,
+ "loss": 0.919,
+ "step": 2087
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3585909683997842e-05,
+ "loss": 0.9014,
+ "step": 2088
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3580091371734798e-05,
+ "loss": 0.8677,
+ "step": 2089
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.357427166900552e-05,
+ "loss": 0.8769,
+ "step": 2090
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3568450578070309e-05,
+ "loss": 0.9164,
+ "step": 2091
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3562628101190015e-05,
+ "loss": 0.9132,
+ "step": 2092
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3556804240626019e-05,
+ "loss": 0.9407,
+ "step": 2093
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3550978998640241e-05,
+ "loss": 0.8397,
+ "step": 2094
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3545152377495136e-05,
+ "loss": 0.9516,
+ "step": 2095
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3539324379453698e-05,
+ "loss": 0.889,
+ "step": 2096
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3533495006779455e-05,
+ "loss": 0.8179,
+ "step": 2097
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3527664261736471e-05,
+ "loss": 0.9019,
+ "step": 2098
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3521832146589335e-05,
+ "loss": 0.9463,
+ "step": 2099
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3515998663603174e-05,
+ "loss": 0.8962,
+ "step": 2100
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3510163815043647e-05,
+ "loss": 0.8493,
+ "step": 2101
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3504327603176943e-05,
+ "loss": 0.7998,
+ "step": 2102
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3498490030269782e-05,
+ "loss": 0.8454,
+ "step": 2103
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3492651098589398e-05,
+ "loss": 0.9641,
+ "step": 2104
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3486810810403578e-05,
+ "loss": 0.9498,
+ "step": 2105
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.348096916798062e-05,
+ "loss": 0.874,
+ "step": 2106
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3475126173589343e-05,
+ "loss": 0.8762,
+ "step": 2107
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3469281829499107e-05,
+ "loss": 0.8726,
+ "step": 2108
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3463436137979786e-05,
+ "loss": 0.9508,
+ "step": 2109
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3457589101301776e-05,
+ "loss": 0.8612,
+ "step": 2110
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3451740721736005e-05,
+ "loss": 0.8746,
+ "step": 2111
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3445891001553905e-05,
+ "loss": 0.8859,
+ "step": 2112
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3440039943027452e-05,
+ "loss": 0.3673,
+ "step": 2113
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3434187548429126e-05,
+ "loss": 0.9588,
+ "step": 2114
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3428333820031922e-05,
+ "loss": 0.8508,
+ "step": 2115
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3422478760109371e-05,
+ "loss": 0.3524,
+ "step": 2116
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3416622370935507e-05,
+ "loss": 0.9202,
+ "step": 2117
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3410764654784885e-05,
+ "loss": 0.8818,
+ "step": 2118
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3404905613932573e-05,
+ "loss": 0.8155,
+ "step": 2119
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3399045250654152e-05,
+ "loss": 0.8723,
+ "step": 2120
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3393183567225724e-05,
+ "loss": 0.9067,
+ "step": 2121
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3387320565923901e-05,
+ "loss": 0.8955,
+ "step": 2122
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.33814562490258e-05,
+ "loss": 0.8651,
+ "step": 2123
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3375590618809056e-05,
+ "loss": 0.8972,
+ "step": 2124
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3369723677551813e-05,
+ "loss": 0.8249,
+ "step": 2125
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3363855427532724e-05,
+ "loss": 0.3413,
+ "step": 2126
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3357985871030948e-05,
+ "loss": 0.9409,
+ "step": 2127
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3352115010326155e-05,
+ "loss": 0.8396,
+ "step": 2128
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3346242847698516e-05,
+ "loss": 0.9014,
+ "step": 2129
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3340369385428713e-05,
+ "loss": 0.9671,
+ "step": 2130
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3334494625797936e-05,
+ "loss": 0.9021,
+ "step": 2131
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3328618571087867e-05,
+ "loss": 0.8415,
+ "step": 2132
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.33227412235807e-05,
+ "loss": 0.9408,
+ "step": 2133
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3316862585559132e-05,
+ "loss": 0.8912,
+ "step": 2134
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3310982659306352e-05,
+ "loss": 0.9067,
+ "step": 2135
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3305101447106064e-05,
+ "loss": 0.878,
+ "step": 2136
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3299218951242456e-05,
+ "loss": 0.8873,
+ "step": 2137
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3293335174000226e-05,
+ "loss": 0.9478,
+ "step": 2138
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.328745011766456e-05,
+ "loss": 0.9111,
+ "step": 2139
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3281563784521154e-05,
+ "loss": 0.8652,
+ "step": 2140
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3275676176856185e-05,
+ "loss": 0.9174,
+ "step": 2141
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3269787296956333e-05,
+ "loss": 0.8871,
+ "step": 2142
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3263897147108778e-05,
+ "loss": 0.8863,
+ "step": 2143
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3258005729601178e-05,
+ "loss": 0.8067,
+ "step": 2144
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3252113046721692e-05,
+ "loss": 0.8547,
+ "step": 2145
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3246219100758974e-05,
+ "loss": 0.8756,
+ "step": 2146
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3240323894002166e-05,
+ "loss": 0.9105,
+ "step": 2147
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3234427428740895e-05,
+ "loss": 0.9473,
+ "step": 2148
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3228529707265279e-05,
+ "loss": 0.8832,
+ "step": 2149
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.322263073186593e-05,
+ "loss": 0.8785,
+ "step": 2150
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3216730504833938e-05,
+ "loss": 0.8199,
+ "step": 2151
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3210829028460883e-05,
+ "loss": 0.901,
+ "step": 2152
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3204926305038832e-05,
+ "loss": 0.869,
+ "step": 2153
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3199022336860335e-05,
+ "loss": 0.9062,
+ "step": 2154
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3193117126218425e-05,
+ "loss": 0.8614,
+ "step": 2155
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3187210675406617e-05,
+ "loss": 0.9126,
+ "step": 2156
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.318130298671891e-05,
+ "loss": 0.9333,
+ "step": 2157
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3175394062449777e-05,
+ "loss": 0.8785,
+ "step": 2158
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3169483904894185e-05,
+ "loss": 0.8519,
+ "step": 2159
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3163572516347565e-05,
+ "loss": 0.903,
+ "step": 2160
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3157659899105835e-05,
+ "loss": 0.9312,
+ "step": 2161
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.315174605546538e-05,
+ "loss": 0.912,
+ "step": 2162
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3145830987723081e-05,
+ "loss": 0.8795,
+ "step": 2163
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3139914698176273e-05,
+ "loss": 0.8608,
+ "step": 2164
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3133997189122777e-05,
+ "loss": 0.8786,
+ "step": 2165
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3128078462860887e-05,
+ "loss": 0.8291,
+ "step": 2166
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3122158521689367e-05,
+ "loss": 0.8889,
+ "step": 2167
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3116237367907454e-05,
+ "loss": 0.8664,
+ "step": 2168
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3110315003814855e-05,
+ "loss": 0.8604,
+ "step": 2169
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3104391431711748e-05,
+ "loss": 0.8725,
+ "step": 2170
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.309846665389878e-05,
+ "loss": 0.8544,
+ "step": 2171
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.309254067267707e-05,
+ "loss": 0.8509,
+ "step": 2172
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3086613490348198e-05,
+ "loss": 0.898,
+ "step": 2173
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3080685109214208e-05,
+ "loss": 0.8706,
+ "step": 2174
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3074755531577628e-05,
+ "loss": 0.8691,
+ "step": 2175
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3068824759741428e-05,
+ "loss": 0.8378,
+ "step": 2176
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.306289279600905e-05,
+ "loss": 0.8933,
+ "step": 2177
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3056959642684404e-05,
+ "loss": 0.9406,
+ "step": 2178
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.305102530207186e-05,
+ "loss": 0.8409,
+ "step": 2179
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3045089776476246e-05,
+ "loss": 0.9124,
+ "step": 2180
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3039153068202853e-05,
+ "loss": 0.8653,
+ "step": 2181
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3033215179557424e-05,
+ "loss": 0.8899,
+ "step": 2182
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3027276112846172e-05,
+ "loss": 0.9423,
+ "step": 2183
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3021335870375763e-05,
+ "loss": 0.9004,
+ "step": 2184
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3015394454453316e-05,
+ "loss": 0.9119,
+ "step": 2185
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3009451867386411e-05,
+ "loss": 0.9051,
+ "step": 2186
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3003508111483077e-05,
+ "loss": 0.9389,
+ "step": 2187
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.29975631890518e-05,
+ "loss": 0.927,
+ "step": 2188
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2991617102401524e-05,
+ "loss": 0.8467,
+ "step": 2189
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2985669853841635e-05,
+ "loss": 0.8822,
+ "step": 2190
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.297972144568198e-05,
+ "loss": 0.8567,
+ "step": 2191
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2973771880232853e-05,
+ "loss": 0.869,
+ "step": 2192
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2967821159804994e-05,
+ "loss": 0.8298,
+ "step": 2193
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2961869286709594e-05,
+ "loss": 0.8769,
+ "step": 2194
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.295591626325829e-05,
+ "loss": 0.9496,
+ "step": 2195
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2949962091763174e-05,
+ "loss": 0.8732,
+ "step": 2196
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2944006774536773e-05,
+ "loss": 0.9003,
+ "step": 2197
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2938050313892062e-05,
+ "loss": 0.3477,
+ "step": 2198
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2932092712142468e-05,
+ "loss": 0.8843,
+ "step": 2199
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.292613397160185e-05,
+ "loss": 0.3639,
+ "step": 2200
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2920174094584514e-05,
+ "loss": 0.8033,
+ "step": 2201
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2914213083405211e-05,
+ "loss": 0.8932,
+ "step": 2202
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2908250940379124e-05,
+ "loss": 0.8685,
+ "step": 2203
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2902287667821885e-05,
+ "loss": 0.8218,
+ "step": 2204
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.289632326804956e-05,
+ "loss": 0.9003,
+ "step": 2205
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2890357743378649e-05,
+ "loss": 0.8658,
+ "step": 2206
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2884391096126098e-05,
+ "loss": 0.3369,
+ "step": 2207
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2878423328609281e-05,
+ "loss": 0.8542,
+ "step": 2208
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2872454443146015e-05,
+ "loss": 0.8544,
+ "step": 2209
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.286648444205454e-05,
+ "loss": 0.9076,
+ "step": 2210
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2860513327653537e-05,
+ "loss": 0.3334,
+ "step": 2211
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2854541102262119e-05,
+ "loss": 0.9436,
+ "step": 2212
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.284856776819983e-05,
+ "loss": 0.8395,
+ "step": 2213
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2842593327786649e-05,
+ "loss": 0.9054,
+ "step": 2214
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2836617783342968e-05,
+ "loss": 0.8798,
+ "step": 2215
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2830641137189628e-05,
+ "loss": 0.8432,
+ "step": 2216
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.282466339164789e-05,
+ "loss": 0.876,
+ "step": 2217
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2818684549039437e-05,
+ "loss": 0.9121,
+ "step": 2218
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2812704611686386e-05,
+ "loss": 0.9487,
+ "step": 2219
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2806723581911274e-05,
+ "loss": 0.889,
+ "step": 2220
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2800741462037065e-05,
+ "loss": 0.8988,
+ "step": 2221
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2794758254387147e-05,
+ "loss": 0.3435,
+ "step": 2222
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2788773961285323e-05,
+ "loss": 0.8443,
+ "step": 2223
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2782788585055829e-05,
+ "loss": 0.9681,
+ "step": 2224
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2776802128023317e-05,
+ "loss": 0.8569,
+ "step": 2225
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2770814592512853e-05,
+ "loss": 0.8902,
+ "step": 2226
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2764825980849931e-05,
+ "loss": 0.8468,
+ "step": 2227
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2758836295360455e-05,
+ "loss": 0.8717,
+ "step": 2228
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2752845538370752e-05,
+ "loss": 0.7939,
+ "step": 2229
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2746853712207567e-05,
+ "loss": 0.8697,
+ "step": 2230
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.274086081919805e-05,
+ "loss": 0.8292,
+ "step": 2231
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.273486686166977e-05,
+ "loss": 0.9003,
+ "step": 2232
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2728871841950719e-05,
+ "loss": 0.8565,
+ "step": 2233
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2722875762369288e-05,
+ "loss": 0.9289,
+ "step": 2234
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2716878625254287e-05,
+ "loss": 0.9051,
+ "step": 2235
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2710880432934934e-05,
+ "loss": 0.9049,
+ "step": 2236
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.270488118774086e-05,
+ "loss": 0.8711,
+ "step": 2237
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.26988808920021e-05,
+ "loss": 0.9489,
+ "step": 2238
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.26928795480491e-05,
+ "loss": 0.8633,
+ "step": 2239
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2686877158212715e-05,
+ "loss": 0.8904,
+ "step": 2240
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.26808737248242e-05,
+ "loss": 0.8486,
+ "step": 2241
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2674869250215225e-05,
+ "loss": 0.8207,
+ "step": 2242
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2668863736717855e-05,
+ "loss": 0.8879,
+ "step": 2243
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2662857186664558e-05,
+ "loss": 0.8753,
+ "step": 2244
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2656849602388222e-05,
+ "loss": 0.8731,
+ "step": 2245
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2650840986222111e-05,
+ "loss": 0.8443,
+ "step": 2246
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2644831340499906e-05,
+ "loss": 0.839,
+ "step": 2247
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2638820667555685e-05,
+ "loss": 0.9184,
+ "step": 2248
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2632808969723927e-05,
+ "loss": 0.8163,
+ "step": 2249
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.26267962493395e-05,
+ "loss": 0.9418,
+ "step": 2250
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2620782508737678e-05,
+ "loss": 0.8629,
+ "step": 2251
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2614767750254129e-05,
+ "loss": 0.3414,
+ "step": 2252
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2608751976224916e-05,
+ "loss": 0.8288,
+ "step": 2253
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2602735188986498e-05,
+ "loss": 0.9099,
+ "step": 2254
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2596717390875721e-05,
+ "loss": 0.8752,
+ "step": 2255
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2590698584229834e-05,
+ "loss": 0.9098,
+ "step": 2256
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2584678771386467e-05,
+ "loss": 0.9381,
+ "step": 2257
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2578657954683651e-05,
+ "loss": 0.88,
+ "step": 2258
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2572636136459799e-05,
+ "loss": 0.9191,
+ "step": 2259
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2566613319053713e-05,
+ "loss": 0.851,
+ "step": 2260
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2560589504804592e-05,
+ "loss": 0.9334,
+ "step": 2261
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2554564696052011e-05,
+ "loss": 0.9118,
+ "step": 2262
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2548538895135942e-05,
+ "loss": 0.33,
+ "step": 2263
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.254251210439673e-05,
+ "loss": 0.8842,
+ "step": 2264
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2536484326175114e-05,
+ "loss": 0.893,
+ "step": 2265
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2530455562812214e-05,
+ "loss": 0.8388,
+ "step": 2266
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.252442581664953e-05,
+ "loss": 0.8895,
+ "step": 2267
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2518395090028952e-05,
+ "loss": 0.946,
+ "step": 2268
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2512363385292739e-05,
+ "loss": 0.8648,
+ "step": 2269
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2506330704783533e-05,
+ "loss": 0.9077,
+ "step": 2270
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2500297050844367e-05,
+ "loss": 0.908,
+ "step": 2271
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2494262425818637e-05,
+ "loss": 0.9121,
+ "step": 2272
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2488226832050116e-05,
+ "loss": 0.9672,
+ "step": 2273
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2482190271882973e-05,
+ "loss": 0.8562,
+ "step": 2274
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2476152747661727e-05,
+ "loss": 0.8644,
+ "step": 2275
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2470114261731288e-05,
+ "loss": 0.8771,
+ "step": 2276
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.246407481643693e-05,
+ "loss": 0.8566,
+ "step": 2277
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.245803441412431e-05,
+ "loss": 0.8637,
+ "step": 2278
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2451993057139445e-05,
+ "loss": 0.882,
+ "step": 2279
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2445950747828732e-05,
+ "loss": 0.8815,
+ "step": 2280
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2439907488538934e-05,
+ "loss": 0.869,
+ "step": 2281
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.243386328161718e-05,
+ "loss": 0.9532,
+ "step": 2282
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2427818129410975e-05,
+ "loss": 0.8998,
+ "step": 2283
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2421772034268187e-05,
+ "loss": 0.8572,
+ "step": 2284
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2415724998537042e-05,
+ "loss": 0.8814,
+ "step": 2285
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2409677024566145e-05,
+ "loss": 0.9488,
+ "step": 2286
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.240362811470446e-05,
+ "loss": 0.8974,
+ "step": 2287
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2397578271301312e-05,
+ "loss": 0.9158,
+ "step": 2288
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2391527496706389e-05,
+ "loss": 0.8891,
+ "step": 2289
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2385475793269744e-05,
+ "loss": 0.8739,
+ "step": 2290
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2379423163341791e-05,
+ "loss": 0.8984,
+ "step": 2291
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2373369609273299e-05,
+ "loss": 0.8428,
+ "step": 2292
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2367315133415396e-05,
+ "loss": 0.8553,
+ "step": 2293
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2361259738119575e-05,
+ "loss": 0.8725,
+ "step": 2294
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2355203425737683e-05,
+ "loss": 0.8022,
+ "step": 2295
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2349146198621917e-05,
+ "loss": 0.8851,
+ "step": 2296
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2343088059124839e-05,
+ "loss": 0.8787,
+ "step": 2297
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2337029009599357e-05,
+ "loss": 0.3428,
+ "step": 2298
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2330969052398735e-05,
+ "loss": 0.9392,
+ "step": 2299
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2324908189876597e-05,
+ "loss": 0.8819,
+ "step": 2300
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2318846424386907e-05,
+ "loss": 0.9341,
+ "step": 2301
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2312783758283981e-05,
+ "loss": 0.9437,
+ "step": 2302
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.23067201939225e-05,
+ "loss": 0.972,
+ "step": 2303
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2300655733657475e-05,
+ "loss": 0.844,
+ "step": 2304
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2294590379844268e-05,
+ "loss": 0.8892,
+ "step": 2305
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2288524134838602e-05,
+ "loss": 0.9031,
+ "step": 2306
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2282457000996533e-05,
+ "loss": 0.9197,
+ "step": 2307
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2276388980674465e-05,
+ "loss": 0.8836,
+ "step": 2308
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.227032007622915e-05,
+ "loss": 0.8728,
+ "step": 2309
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2264250290017675e-05,
+ "loss": 0.8441,
+ "step": 2310
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2258179624397477e-05,
+ "loss": 0.7777,
+ "step": 2311
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2252108081726337e-05,
+ "loss": 0.8415,
+ "step": 2312
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.224603566436237e-05,
+ "loss": 0.9049,
+ "step": 2313
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2239962374664029e-05,
+ "loss": 0.8827,
+ "step": 2314
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2233888214990113e-05,
+ "loss": 0.3561,
+ "step": 2315
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2227813187699757e-05,
+ "loss": 0.8743,
+ "step": 2316
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.222173729515243e-05,
+ "loss": 0.8895,
+ "step": 2317
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2215660539707936e-05,
+ "loss": 0.9133,
+ "step": 2318
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2209582923726424e-05,
+ "loss": 0.8837,
+ "step": 2319
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2203504449568361e-05,
+ "loss": 0.9187,
+ "step": 2320
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2197425119594563e-05,
+ "loss": 0.9186,
+ "step": 2321
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.219134493616617e-05,
+ "loss": 0.8447,
+ "step": 2322
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2185263901644653e-05,
+ "loss": 0.8874,
+ "step": 2323
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.217918201839182e-05,
+ "loss": 0.9798,
+ "step": 2324
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2173099288769799e-05,
+ "loss": 0.913,
+ "step": 2325
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2167015715141057e-05,
+ "loss": 0.9146,
+ "step": 2326
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.216093129986838e-05,
+ "loss": 0.8392,
+ "step": 2327
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2154846045314885e-05,
+ "loss": 0.8755,
+ "step": 2328
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.214875995384402e-05,
+ "loss": 0.8648,
+ "step": 2329
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.214267302781955e-05,
+ "loss": 0.8526,
+ "step": 2330
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2136585269605558e-05,
+ "loss": 0.9036,
+ "step": 2331
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2130496681566475e-05,
+ "loss": 0.8257,
+ "step": 2332
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.212440726606703e-05,
+ "loss": 0.8487,
+ "step": 2333
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.211831702547228e-05,
+ "loss": 0.8688,
+ "step": 2334
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2112225962147605e-05,
+ "loss": 0.8857,
+ "step": 2335
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.210613407845871e-05,
+ "loss": 0.3028,
+ "step": 2336
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2100041376771605e-05,
+ "loss": 0.849,
+ "step": 2337
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.209394785945263e-05,
+ "loss": 0.8291,
+ "step": 2338
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2087853528868432e-05,
+ "loss": 0.8964,
+ "step": 2339
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2081758387385982e-05,
+ "loss": 0.8533,
+ "step": 2340
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2075662437372567e-05,
+ "loss": 0.9254,
+ "step": 2341
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2069565681195776e-05,
+ "loss": 0.34,
+ "step": 2342
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.206346812122352e-05,
+ "loss": 0.8912,
+ "step": 2343
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2057369759824025e-05,
+ "loss": 0.9265,
+ "step": 2344
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2051270599365825e-05,
+ "loss": 0.9092,
+ "step": 2345
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2045170642217756e-05,
+ "loss": 0.8999,
+ "step": 2346
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2039069890748978e-05,
+ "loss": 0.9081,
+ "step": 2347
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2032968347328952e-05,
+ "loss": 0.8721,
+ "step": 2348
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2026866014327446e-05,
+ "loss": 0.92,
+ "step": 2349
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2020762894114535e-05,
+ "loss": 0.8671,
+ "step": 2350
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.20146589890606e-05,
+ "loss": 0.902,
+ "step": 2351
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2008554301536328e-05,
+ "loss": 0.8555,
+ "step": 2352
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2002448833912712e-05,
+ "loss": 0.8908,
+ "step": 2353
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1996342588561042e-05,
+ "loss": 0.862,
+ "step": 2354
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1990235567852917e-05,
+ "loss": 0.8971,
+ "step": 2355
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1984127774160226e-05,
+ "loss": 0.8591,
+ "step": 2356
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1978019209855174e-05,
+ "loss": 0.8669,
+ "step": 2357
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1971909877310253e-05,
+ "loss": 0.807,
+ "step": 2358
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1965799778898258e-05,
+ "loss": 0.9165,
+ "step": 2359
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1959688916992279e-05,
+ "loss": 0.8866,
+ "step": 2360
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1953577293965707e-05,
+ "loss": 0.9545,
+ "step": 2361
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1947464912192228e-05,
+ "loss": 0.953,
+ "step": 2362
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1941351774045815e-05,
+ "loss": 0.3617,
+ "step": 2363
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1935237881900743e-05,
+ "loss": 0.8426,
+ "step": 2364
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1929123238131579e-05,
+ "loss": 0.8996,
+ "step": 2365
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1923007845113178e-05,
+ "loss": 0.9363,
+ "step": 2366
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1916891705220689e-05,
+ "loss": 0.792,
+ "step": 2367
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.191077482082955e-05,
+ "loss": 0.8611,
+ "step": 2368
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1904657194315486e-05,
+ "loss": 0.8716,
+ "step": 2369
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1898538828054517e-05,
+ "loss": 0.8647,
+ "step": 2370
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1892419724422946e-05,
+ "loss": 0.8368,
+ "step": 2371
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1886299885797357e-05,
+ "loss": 0.8444,
+ "step": 2372
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1880179314554629e-05,
+ "loss": 0.8683,
+ "step": 2373
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1874058013071923e-05,
+ "loss": 0.8961,
+ "step": 2374
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1867935983726676e-05,
+ "loss": 0.8572,
+ "step": 2375
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.186181322889662e-05,
+ "loss": 0.8506,
+ "step": 2376
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1855689750959759e-05,
+ "loss": 0.8712,
+ "step": 2377
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1849565552294379e-05,
+ "loss": 0.3329,
+ "step": 2378
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1843440635279056e-05,
+ "loss": 0.893,
+ "step": 2379
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1837315002292629e-05,
+ "loss": 0.8584,
+ "step": 2380
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1831188655714225e-05,
+ "loss": 0.876,
+ "step": 2381
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.182506159792325e-05,
+ "loss": 0.9164,
+ "step": 2382
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1818933831299381e-05,
+ "loss": 0.3203,
+ "step": 2383
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1812805358222571e-05,
+ "loss": 0.8582,
+ "step": 2384
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.180667618107305e-05,
+ "loss": 0.8843,
+ "step": 2385
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1800546302231317e-05,
+ "loss": 0.8675,
+ "step": 2386
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1794415724078147e-05,
+ "loss": 0.834,
+ "step": 2387
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1788284448994588e-05,
+ "loss": 0.8438,
+ "step": 2388
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1782152479361956e-05,
+ "loss": 0.8646,
+ "step": 2389
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1776019817561834e-05,
+ "loss": 0.9151,
+ "step": 2390
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1769886465976086e-05,
+ "loss": 0.8176,
+ "step": 2391
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1763752426986823e-05,
+ "loss": 0.9262,
+ "step": 2392
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1757617702976443e-05,
+ "loss": 0.832,
+ "step": 2393
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.17514822963276e-05,
+ "loss": 0.8366,
+ "step": 2394
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1745346209423216e-05,
+ "loss": 0.858,
+ "step": 2395
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1739209444646479e-05,
+ "loss": 0.3131,
+ "step": 2396
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1733072004380827e-05,
+ "loss": 0.9138,
+ "step": 2397
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1726933891009985e-05,
+ "loss": 0.847,
+ "step": 2398
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1720795106917917e-05,
+ "loss": 0.927,
+ "step": 2399
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.171465565448886e-05,
+ "loss": 0.8945,
+ "step": 2400
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1708515536107299e-05,
+ "loss": 0.8702,
+ "step": 2401
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1702374754157998e-05,
+ "loss": 0.9371,
+ "step": 2402
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1696233311025957e-05,
+ "loss": 0.8477,
+ "step": 2403
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1690091209096441e-05,
+ "loss": 0.8746,
+ "step": 2404
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1683948450754976e-05,
+ "loss": 0.8714,
+ "step": 2405
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1677805038387337e-05,
+ "loss": 0.8483,
+ "step": 2406
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1671660974379554e-05,
+ "loss": 0.8766,
+ "step": 2407
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1665516261117914e-05,
+ "loss": 0.8786,
+ "step": 2408
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1659370900988946e-05,
+ "loss": 0.9222,
+ "step": 2409
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.165322489637944e-05,
+ "loss": 0.8537,
+ "step": 2410
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.164707824967644e-05,
+ "loss": 0.3372,
+ "step": 2411
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1640930963267226e-05,
+ "loss": 0.8816,
+ "step": 2412
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1634783039539328e-05,
+ "loss": 0.91,
+ "step": 2413
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.162863448088054e-05,
+ "loss": 0.8229,
+ "step": 2414
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1622485289678886e-05,
+ "loss": 0.8628,
+ "step": 2415
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1616335468322641e-05,
+ "loss": 0.929,
+ "step": 2416
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1610185019200324e-05,
+ "loss": 0.8675,
+ "step": 2417
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1604033944700701e-05,
+ "loss": 0.8585,
+ "step": 2418
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1597882247212776e-05,
+ "loss": 0.8846,
+ "step": 2419
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.15917299291258e-05,
+ "loss": 0.9459,
+ "step": 2420
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1585576992829261e-05,
+ "loss": 0.8492,
+ "step": 2421
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1579423440712887e-05,
+ "loss": 0.8401,
+ "step": 2422
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1573269275166652e-05,
+ "loss": 0.839,
+ "step": 2423
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1567114498580758e-05,
+ "loss": 0.876,
+ "step": 2424
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1560959113345649e-05,
+ "loss": 0.8728,
+ "step": 2425
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1554803121852005e-05,
+ "loss": 0.8891,
+ "step": 2426
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1548646526490749e-05,
+ "loss": 0.8827,
+ "step": 2427
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1542489329653024e-05,
+ "loss": 0.8866,
+ "step": 2428
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.153633153373022e-05,
+ "loss": 0.3399,
+ "step": 2429
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1530173141113947e-05,
+ "loss": 0.9129,
+ "step": 2430
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1524014154196063e-05,
+ "loss": 0.8873,
+ "step": 2431
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1517854575368644e-05,
+ "loss": 0.9095,
+ "step": 2432
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1511694407023994e-05,
+ "loss": 0.845,
+ "step": 2433
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1505533651554654e-05,
+ "loss": 0.9395,
+ "step": 2434
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1499372311353398e-05,
+ "loss": 0.8332,
+ "step": 2435
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.149321038881321e-05,
+ "loss": 0.8609,
+ "step": 2436
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1487047886327314e-05,
+ "loss": 0.8932,
+ "step": 2437
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1480884806289151e-05,
+ "loss": 0.8232,
+ "step": 2438
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1474721151092397e-05,
+ "loss": 0.8671,
+ "step": 2439
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1468556923130943e-05,
+ "loss": 0.3353,
+ "step": 2440
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.14623921247989e-05,
+ "loss": 0.8704,
+ "step": 2441
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1456226758490603e-05,
+ "loss": 0.8445,
+ "step": 2442
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1450060826600618e-05,
+ "loss": 0.814,
+ "step": 2443
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1443894331523718e-05,
+ "loss": 0.8857,
+ "step": 2444
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1437727275654893e-05,
+ "loss": 0.9106,
+ "step": 2445
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1431559661389362e-05,
+ "loss": 0.8441,
+ "step": 2446
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1425391491122557e-05,
+ "loss": 0.3749,
+ "step": 2447
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.141922276725012e-05,
+ "loss": 0.8948,
+ "step": 2448
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1413053492167915e-05,
+ "loss": 0.9103,
+ "step": 2449
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1406883668272015e-05,
+ "loss": 0.8903,
+ "step": 2450
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.140071329795871e-05,
+ "loss": 0.8281,
+ "step": 2451
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.13945423836245e-05,
+ "loss": 0.9291,
+ "step": 2452
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1388370927666102e-05,
+ "loss": 0.8681,
+ "step": 2453
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1382198932480429e-05,
+ "loss": 0.84,
+ "step": 2454
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1376026400464616e-05,
+ "loss": 0.884,
+ "step": 2455
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.136985333401601e-05,
+ "loss": 0.9238,
+ "step": 2456
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1363679735532151e-05,
+ "loss": 0.8118,
+ "step": 2457
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1357505607410797e-05,
+ "loss": 0.8997,
+ "step": 2458
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1351330952049908e-05,
+ "loss": 0.3291,
+ "step": 2459
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1345155771847646e-05,
+ "loss": 0.8934,
+ "step": 2460
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1338980069202388e-05,
+ "loss": 0.839,
+ "step": 2461
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1332803846512697e-05,
+ "loss": 0.8599,
+ "step": 2462
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1326627106177348e-05,
+ "loss": 0.8432,
+ "step": 2463
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.132044985059532e-05,
+ "loss": 0.8947,
+ "step": 2464
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1314272082165785e-05,
+ "loss": 0.8495,
+ "step": 2465
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1308093803288119e-05,
+ "loss": 0.9166,
+ "step": 2466
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.130191501636189e-05,
+ "loss": 0.8832,
+ "step": 2467
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1295735723786872e-05,
+ "loss": 0.8477,
+ "step": 2468
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1289555927963032e-05,
+ "loss": 0.8675,
+ "step": 2469
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1283375631290528e-05,
+ "loss": 0.8931,
+ "step": 2470
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1277194836169714e-05,
+ "loss": 0.8718,
+ "step": 2471
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1271013545001144e-05,
+ "loss": 0.9066,
+ "step": 2472
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1264831760185562e-05,
+ "loss": 0.9353,
+ "step": 2473
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1258649484123895e-05,
+ "loss": 0.83,
+ "step": 2474
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1252466719217274e-05,
+ "loss": 0.8601,
+ "step": 2475
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1246283467867012e-05,
+ "loss": 0.856,
+ "step": 2476
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1240099732474613e-05,
+ "loss": 0.8202,
+ "step": 2477
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1233915515441765e-05,
+ "loss": 0.8848,
+ "step": 2478
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1227730819170349e-05,
+ "loss": 0.8669,
+ "step": 2479
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1221545646062431e-05,
+ "loss": 0.8553,
+ "step": 2480
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.121535999852026e-05,
+ "loss": 0.9064,
+ "step": 2481
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1209173878946271e-05,
+ "loss": 0.8003,
+ "step": 2482
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1202987289743078e-05,
+ "loss": 0.7714,
+ "step": 2483
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1196800233313488e-05,
+ "loss": 0.8098,
+ "step": 2484
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1190612712060475e-05,
+ "loss": 0.9308,
+ "step": 2485
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1184424728387204e-05,
+ "loss": 0.8383,
+ "step": 2486
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1178236284697017e-05,
+ "loss": 0.9422,
+ "step": 2487
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1172047383393434e-05,
+ "loss": 0.9478,
+ "step": 2488
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1165858026880151e-05,
+ "loss": 0.8395,
+ "step": 2489
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1159668217561048e-05,
+ "loss": 0.883,
+ "step": 2490
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.115347795784017e-05,
+ "loss": 0.8678,
+ "step": 2491
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1147287250121745e-05,
+ "loss": 0.8559,
+ "step": 2492
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1141096096810174e-05,
+ "loss": 0.8701,
+ "step": 2493
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1134904500310029e-05,
+ "loss": 0.9352,
+ "step": 2494
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1128712463026048e-05,
+ "loss": 0.9426,
+ "step": 2495
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1122519987363156e-05,
+ "loss": 0.89,
+ "step": 2496
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1116327075726436e-05,
+ "loss": 0.8317,
+ "step": 2497
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1110133730521142e-05,
+ "loss": 0.914,
+ "step": 2498
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.11039399541527e-05,
+ "loss": 0.8242,
+ "step": 2499
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.10977457490267e-05,
+ "loss": 0.8806,
+ "step": 2500
+ }
+ ],
+ "logging_steps": 1.0,
+ "max_steps": 5197,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 500,
+ "total_flos": 6922247222984704.0,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/training_args.bin b/llava-v1.5-7b-concat-16/checkpoint-2500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b5632db25e85f4a6440989c9cf4e5eb45e67ccd6
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7e1397f63ab71c83d4546fd5cc220108e4e3680c17b2f7501e2a09ab729de344
+size 6712
diff --git a/llava-v1.5-7b-concat-16/checkpoint-2500/zero_to_fp32.py b/llava-v1.5-7b-concat-16/checkpoint-2500/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..c98caae31534368be22b67fc4ae906836c992a8d
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-2500/zero_to_fp32.py
@@ -0,0 +1,587 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+ # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+ # param, re-consolidating each param, while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+ - ``model`: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir, args.output_file, tag=args.tag)
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/config.json b/llava-v1.5-7b-concat-16/checkpoint-3000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..5b87d31c757ff1906899f1e3a1d047752a0c5005
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/config.json
@@ -0,0 +1,44 @@
+{
+ "_name_or_path": "lmsys/vicuna-7b-v1.5",
+ "architectures": [
+ "LlavaLlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 4096,
+ "mm_hidden_size": 1536,
+ "mm_patch_merge_type": "flat",
+ "mm_projector_lr": null,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "Leonardo6/clip-12m-16-roberta4",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.37.2",
+ "tune_mm_mlp_adapter": false,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vocab_size": 32000
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/generation_config.json b/llava-v1.5-7b-concat-16/checkpoint-3000/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f686e33d0dd24a8bc304bf932f5bc12717579f0b
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/generation_config.json
@@ -0,0 +1,11 @@
+{
+ "attn_implementation": "flash_attention_2",
+ "bos_token_id": 1,
+ "do_sample": true,
+ "eos_token_id": 2,
+ "max_length": 4096,
+ "pad_token_id": 0,
+ "temperature": 0.9,
+ "top_p": 0.6,
+ "transformers_version": "4.37.2"
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/latest b/llava-v1.5-7b-concat-16/checkpoint-3000/latest
new file mode 100644
index 0000000000000000000000000000000000000000..1d7ba373feda3875a77d01589035020625a7c98e
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/latest
@@ -0,0 +1 @@
+global_step3000
\ No newline at end of file
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/model.safetensors.index.json b/llava-v1.5-7b-concat-16/checkpoint-3000/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..51cd1fe67b08db18738439b039f9eec8e67fa02f
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/model.safetensors.index.json
@@ -0,0 +1,701 @@
+{
+ "metadata": {
+ "total_size": 13867362304
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00003-of-00003.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.mm_projector.0.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.0.weight": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.weight": "model-00003-of-00003.safetensors",
+ "model.norm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.embeddings.class_embedding": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.embeddings.patch_embedding.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.embeddings.position_embedding.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.post_layernorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.post_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.pre_layrnorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.pre_layrnorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.cls_token": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.patch_embeddings.projection.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.patch_embeddings.projection.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.position_embeddings": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.layernorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.pooler.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.pooler.dense.weight": "model-00003-of-00003.safetensors"
+ }
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/rng_state_0.pth b/llava-v1.5-7b-concat-16/checkpoint-3000/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..44bb770e4c85b7b758a6b2962384781d026daabd
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:af2966def51ea1ab87d97a757bd22e7f72001f21baee1a67abfc367e92e2e402
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/rng_state_1.pth b/llava-v1.5-7b-concat-16/checkpoint-3000/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..75cd02b1fceb1b3b1aae40cf4857ce2cea6fd436
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8cf5883933ffd2749908af2fffabf58c748ecc9afbc507bfa1868172477bbf0c
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/rng_state_2.pth b/llava-v1.5-7b-concat-16/checkpoint-3000/rng_state_2.pth
new file mode 100644
index 0000000000000000000000000000000000000000..127a0ab4fb3652fab0edcb4ecc63af17870be47c
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/rng_state_2.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b4f7265b828abac3132886540e39210cab53edc42ddf0389bd517ccd5c1ca42d
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/rng_state_3.pth b/llava-v1.5-7b-concat-16/checkpoint-3000/rng_state_3.pth
new file mode 100644
index 0000000000000000000000000000000000000000..a696a927c56c2b5ca8cb6f3d71f9ca36a1ae9fea
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/rng_state_3.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1ebdec8d90b17c1d6090b2bc79535cba013a72aa00b297c128236362564f916
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/scheduler.pt b/llava-v1.5-7b-concat-16/checkpoint-3000/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..7c378487cc38a2af9750cbb005d0f9d5f07b3abe
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:97b4e8a1d1bca005b0ccf6dace8b874ef4c516d1c1c1d0aec4387f34dbba7ab1
+size 1064
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/special_tokens_map.json b/llava-v1.5-7b-concat-16/checkpoint-3000/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..14761dcf1466dc232bd41de9c21d4c617b15755e
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/special_tokens_map.json
@@ -0,0 +1,24 @@
+{
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "",
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/tokenizer.model b/llava-v1.5-7b-concat-16/checkpoint-3000/tokenizer.model
new file mode 100644
index 0000000000000000000000000000000000000000..6c00c742ce03c627d6cd5b795984876fa49fa899
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/tokenizer.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/tokenizer_config.json b/llava-v1.5-7b-concat-16/checkpoint-3000/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..2d53c0f8edb049fa98763ee75652fafa68bf7f42
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/tokenizer_config.json
@@ -0,0 +1,42 @@
+{
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "legacy": false,
+ "model_max_length": 2048,
+ "pad_token": "",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "",
+ "use_default_system_prompt": false
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/trainer_state.json b/llava-v1.5-7b-concat-16/checkpoint-3000/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..57783846802093636ca6173732079b33dd5111ab
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/trainer_state.json
@@ -0,0 +1,18021 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 0.577172815160406,
+ "eval_steps": 500,
+ "global_step": 3000,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.282051282051282e-07,
+ "loss": 1.437,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.564102564102564e-07,
+ "loss": 1.4396,
+ "step": 2
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.846153846153847e-07,
+ "loss": 1.4201,
+ "step": 3
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 5.128205128205128e-07,
+ "loss": 1.4526,
+ "step": 4
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 6.41025641025641e-07,
+ "loss": 1.4033,
+ "step": 5
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 7.692307692307694e-07,
+ "loss": 1.4341,
+ "step": 6
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 8.974358974358975e-07,
+ "loss": 1.455,
+ "step": 7
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.0256410256410257e-06,
+ "loss": 1.4195,
+ "step": 8
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.153846153846154e-06,
+ "loss": 1.4367,
+ "step": 9
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.282051282051282e-06,
+ "loss": 1.3549,
+ "step": 10
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.4102564102564104e-06,
+ "loss": 1.3929,
+ "step": 11
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.5384615384615387e-06,
+ "loss": 1.3577,
+ "step": 12
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.6666666666666667e-06,
+ "loss": 1.3198,
+ "step": 13
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.794871794871795e-06,
+ "loss": 1.242,
+ "step": 14
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.9230769230769234e-06,
+ "loss": 1.2693,
+ "step": 15
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.0512820512820513e-06,
+ "loss": 1.3043,
+ "step": 16
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.1794871794871797e-06,
+ "loss": 1.2034,
+ "step": 17
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.307692307692308e-06,
+ "loss": 1.1896,
+ "step": 18
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.435897435897436e-06,
+ "loss": 1.2483,
+ "step": 19
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.564102564102564e-06,
+ "loss": 1.1324,
+ "step": 20
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.6923076923076923e-06,
+ "loss": 1.2191,
+ "step": 21
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.8205128205128207e-06,
+ "loss": 1.1962,
+ "step": 22
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.948717948717949e-06,
+ "loss": 1.125,
+ "step": 23
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.0769230769230774e-06,
+ "loss": 1.2311,
+ "step": 24
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.205128205128206e-06,
+ "loss": 1.1687,
+ "step": 25
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.3333333333333333e-06,
+ "loss": 1.1214,
+ "step": 26
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.4615384615384617e-06,
+ "loss": 1.1449,
+ "step": 27
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.58974358974359e-06,
+ "loss": 1.139,
+ "step": 28
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.7179487179487184e-06,
+ "loss": 1.0864,
+ "step": 29
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.846153846153847e-06,
+ "loss": 1.1032,
+ "step": 30
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.974358974358974e-06,
+ "loss": 1.1475,
+ "step": 31
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.102564102564103e-06,
+ "loss": 1.0742,
+ "step": 32
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.230769230769231e-06,
+ "loss": 1.1101,
+ "step": 33
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.358974358974359e-06,
+ "loss": 1.0727,
+ "step": 34
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.487179487179488e-06,
+ "loss": 1.0478,
+ "step": 35
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.615384615384616e-06,
+ "loss": 1.099,
+ "step": 36
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.743589743589744e-06,
+ "loss": 0.3001,
+ "step": 37
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.871794871794872e-06,
+ "loss": 1.095,
+ "step": 38
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5e-06,
+ "loss": 1.0828,
+ "step": 39
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.128205128205128e-06,
+ "loss": 1.0715,
+ "step": 40
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.256410256410257e-06,
+ "loss": 1.0794,
+ "step": 41
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.384615384615385e-06,
+ "loss": 1.1222,
+ "step": 42
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.512820512820514e-06,
+ "loss": 1.0315,
+ "step": 43
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.641025641025641e-06,
+ "loss": 1.0473,
+ "step": 44
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.769230769230769e-06,
+ "loss": 1.1067,
+ "step": 45
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.897435897435898e-06,
+ "loss": 1.0335,
+ "step": 46
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.025641025641026e-06,
+ "loss": 1.0009,
+ "step": 47
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.153846153846155e-06,
+ "loss": 1.0086,
+ "step": 48
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.282051282051282e-06,
+ "loss": 1.0027,
+ "step": 49
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.410256410256412e-06,
+ "loss": 1.0066,
+ "step": 50
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.538461538461539e-06,
+ "loss": 1.0375,
+ "step": 51
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.666666666666667e-06,
+ "loss": 1.0491,
+ "step": 52
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.794871794871796e-06,
+ "loss": 1.0522,
+ "step": 53
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.923076923076923e-06,
+ "loss": 0.9977,
+ "step": 54
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.051282051282053e-06,
+ "loss": 1.0516,
+ "step": 55
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.17948717948718e-06,
+ "loss": 0.3065,
+ "step": 56
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.307692307692308e-06,
+ "loss": 1.057,
+ "step": 57
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.435897435897437e-06,
+ "loss": 1.0589,
+ "step": 58
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.564102564102564e-06,
+ "loss": 1.0796,
+ "step": 59
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.692307692307694e-06,
+ "loss": 1.0433,
+ "step": 60
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.820512820512822e-06,
+ "loss": 0.9848,
+ "step": 61
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.948717948717949e-06,
+ "loss": 1.0166,
+ "step": 62
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.076923076923077e-06,
+ "loss": 0.9902,
+ "step": 63
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.205128205128205e-06,
+ "loss": 1.0357,
+ "step": 64
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.333333333333334e-06,
+ "loss": 0.9981,
+ "step": 65
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.461538461538462e-06,
+ "loss": 0.9887,
+ "step": 66
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.58974358974359e-06,
+ "loss": 0.9445,
+ "step": 67
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.717948717948719e-06,
+ "loss": 1.0034,
+ "step": 68
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.846153846153847e-06,
+ "loss": 0.9865,
+ "step": 69
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.974358974358976e-06,
+ "loss": 1.0095,
+ "step": 70
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.102564102564104e-06,
+ "loss": 0.988,
+ "step": 71
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.230769230769232e-06,
+ "loss": 0.9673,
+ "step": 72
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.358974358974359e-06,
+ "loss": 1.0383,
+ "step": 73
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.487179487179487e-06,
+ "loss": 0.9842,
+ "step": 74
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.615384615384616e-06,
+ "loss": 0.9988,
+ "step": 75
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.743589743589744e-06,
+ "loss": 0.9715,
+ "step": 76
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.871794871794872e-06,
+ "loss": 0.9306,
+ "step": 77
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1e-05,
+ "loss": 1.0179,
+ "step": 78
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.012820512820513e-05,
+ "loss": 1.0813,
+ "step": 79
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0256410256410256e-05,
+ "loss": 0.9825,
+ "step": 80
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0384615384615386e-05,
+ "loss": 1.0437,
+ "step": 81
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0512820512820514e-05,
+ "loss": 1.0863,
+ "step": 82
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0641025641025643e-05,
+ "loss": 1.0367,
+ "step": 83
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.076923076923077e-05,
+ "loss": 0.9808,
+ "step": 84
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0897435897435898e-05,
+ "loss": 0.9815,
+ "step": 85
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1025641025641028e-05,
+ "loss": 1.0001,
+ "step": 86
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1153846153846154e-05,
+ "loss": 1.0443,
+ "step": 87
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1282051282051283e-05,
+ "loss": 1.0108,
+ "step": 88
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1410256410256411e-05,
+ "loss": 0.2945,
+ "step": 89
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1538461538461538e-05,
+ "loss": 0.9728,
+ "step": 90
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1666666666666668e-05,
+ "loss": 1.0072,
+ "step": 91
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1794871794871796e-05,
+ "loss": 1.0504,
+ "step": 92
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1923076923076925e-05,
+ "loss": 1.0167,
+ "step": 93
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2051282051282051e-05,
+ "loss": 0.9801,
+ "step": 94
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.217948717948718e-05,
+ "loss": 0.307,
+ "step": 95
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.230769230769231e-05,
+ "loss": 0.9832,
+ "step": 96
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2435897435897436e-05,
+ "loss": 1.0245,
+ "step": 97
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2564102564102565e-05,
+ "loss": 1.016,
+ "step": 98
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2692307692307693e-05,
+ "loss": 0.9245,
+ "step": 99
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2820512820512823e-05,
+ "loss": 0.2833,
+ "step": 100
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.294871794871795e-05,
+ "loss": 0.9694,
+ "step": 101
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3076923076923078e-05,
+ "loss": 1.0847,
+ "step": 102
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3205128205128207e-05,
+ "loss": 0.9805,
+ "step": 103
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3333333333333333e-05,
+ "loss": 1.0026,
+ "step": 104
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3461538461538463e-05,
+ "loss": 0.2628,
+ "step": 105
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3589743589743592e-05,
+ "loss": 0.9652,
+ "step": 106
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3717948717948718e-05,
+ "loss": 1.0551,
+ "step": 107
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3846153846153847e-05,
+ "loss": 0.9897,
+ "step": 108
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3974358974358975e-05,
+ "loss": 1.0074,
+ "step": 109
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4102564102564105e-05,
+ "loss": 0.9967,
+ "step": 110
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4230769230769232e-05,
+ "loss": 0.9988,
+ "step": 111
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.435897435897436e-05,
+ "loss": 1.07,
+ "step": 112
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4487179487179489e-05,
+ "loss": 0.9754,
+ "step": 113
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4615384615384615e-05,
+ "loss": 1.022,
+ "step": 114
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4743589743589745e-05,
+ "loss": 0.9851,
+ "step": 115
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4871794871794874e-05,
+ "loss": 0.2876,
+ "step": 116
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5000000000000002e-05,
+ "loss": 1.0329,
+ "step": 117
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5128205128205129e-05,
+ "loss": 0.9843,
+ "step": 118
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5256410256410257e-05,
+ "loss": 1.0554,
+ "step": 119
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5384615384615387e-05,
+ "loss": 0.9708,
+ "step": 120
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5512820512820516e-05,
+ "loss": 1.0252,
+ "step": 121
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5641025641025644e-05,
+ "loss": 1.0464,
+ "step": 122
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.576923076923077e-05,
+ "loss": 0.9278,
+ "step": 123
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5897435897435897e-05,
+ "loss": 1.0028,
+ "step": 124
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.602564102564103e-05,
+ "loss": 1.0319,
+ "step": 125
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6153846153846154e-05,
+ "loss": 0.9877,
+ "step": 126
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6282051282051282e-05,
+ "loss": 1.067,
+ "step": 127
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.641025641025641e-05,
+ "loss": 0.8855,
+ "step": 128
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.653846153846154e-05,
+ "loss": 0.9933,
+ "step": 129
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6666666666666667e-05,
+ "loss": 1.0038,
+ "step": 130
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6794871794871796e-05,
+ "loss": 1.0337,
+ "step": 131
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6923076923076924e-05,
+ "loss": 0.9185,
+ "step": 132
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7051282051282053e-05,
+ "loss": 0.9293,
+ "step": 133
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.717948717948718e-05,
+ "loss": 0.9535,
+ "step": 134
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.730769230769231e-05,
+ "loss": 0.9931,
+ "step": 135
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7435897435897438e-05,
+ "loss": 0.934,
+ "step": 136
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7564102564102566e-05,
+ "loss": 0.9966,
+ "step": 137
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7692307692307694e-05,
+ "loss": 1.018,
+ "step": 138
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7820512820512823e-05,
+ "loss": 0.9646,
+ "step": 139
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.794871794871795e-05,
+ "loss": 1.0316,
+ "step": 140
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.807692307692308e-05,
+ "loss": 1.0237,
+ "step": 141
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8205128205128208e-05,
+ "loss": 1.0058,
+ "step": 142
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8333333333333333e-05,
+ "loss": 1.0256,
+ "step": 143
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8461538461538465e-05,
+ "loss": 0.9973,
+ "step": 144
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8589743589743593e-05,
+ "loss": 0.9952,
+ "step": 145
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8717948717948718e-05,
+ "loss": 0.9886,
+ "step": 146
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8846153846153846e-05,
+ "loss": 0.9792,
+ "step": 147
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8974358974358975e-05,
+ "loss": 0.9918,
+ "step": 148
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9102564102564106e-05,
+ "loss": 0.9943,
+ "step": 149
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.923076923076923e-05,
+ "loss": 1.0113,
+ "step": 150
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.935897435897436e-05,
+ "loss": 1.0034,
+ "step": 151
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9487179487179488e-05,
+ "loss": 0.2836,
+ "step": 152
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9615384615384617e-05,
+ "loss": 0.9962,
+ "step": 153
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9743589743589745e-05,
+ "loss": 0.9848,
+ "step": 154
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9871794871794873e-05,
+ "loss": 0.9459,
+ "step": 155
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 2e-05,
+ "loss": 1.06,
+ "step": 156
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999998058057616e-05,
+ "loss": 1.0001,
+ "step": 157
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999992232231216e-05,
+ "loss": 1.0235,
+ "step": 158
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999998252252306e-05,
+ "loss": 0.9819,
+ "step": 159
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999968928936924e-05,
+ "loss": 0.9859,
+ "step": 160
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999995145147809e-05,
+ "loss": 0.9607,
+ "step": 161
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999930090153335e-05,
+ "loss": 0.9999,
+ "step": 162
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999904844970963e-05,
+ "loss": 0.9986,
+ "step": 163
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999987571594078e-05,
+ "loss": 0.337,
+ "step": 164
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.99998427030741e-05,
+ "loss": 0.9978,
+ "step": 165
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999980580638374e-05,
+ "loss": 1.0083,
+ "step": 166
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999976502588403e-05,
+ "loss": 0.9703,
+ "step": 167
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999720361590812e-05,
+ "loss": 0.9653,
+ "step": 168
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999671813521435e-05,
+ "loss": 0.9899,
+ "step": 169
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999961938169475e-05,
+ "loss": 0.9462,
+ "step": 170
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999563066131124e-05,
+ "loss": 0.8944,
+ "step": 171
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999502866852427e-05,
+ "loss": 1.0217,
+ "step": 172
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999943878388204e-05,
+ "loss": 0.9505,
+ "step": 173
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999370817244853e-05,
+ "loss": 0.9858,
+ "step": 174
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999298966967264e-05,
+ "loss": 1.0156,
+ "step": 175
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999223233077178e-05,
+ "loss": 1.001,
+ "step": 176
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999914361560401e-05,
+ "loss": 0.9823,
+ "step": 177
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999060114578682e-05,
+ "loss": 0.9295,
+ "step": 178
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998972730033624e-05,
+ "loss": 0.9641,
+ "step": 179
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998881462002778e-05,
+ "loss": 0.2889,
+ "step": 180
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998786310521585e-05,
+ "loss": 0.9556,
+ "step": 181
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998687275627008e-05,
+ "loss": 1.0336,
+ "step": 182
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998584357357503e-05,
+ "loss": 0.9954,
+ "step": 183
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998477555753054e-05,
+ "loss": 0.958,
+ "step": 184
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998366870855134e-05,
+ "loss": 1.0338,
+ "step": 185
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999825230270673e-05,
+ "loss": 0.982,
+ "step": 186
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998133851352342e-05,
+ "loss": 0.3328,
+ "step": 187
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998011516837974e-05,
+ "loss": 0.9857,
+ "step": 188
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999788529921114e-05,
+ "loss": 0.917,
+ "step": 189
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999775519852086e-05,
+ "loss": 0.2945,
+ "step": 190
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999762121481767e-05,
+ "loss": 0.9773,
+ "step": 191
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.99974833481536e-05,
+ "loss": 0.9617,
+ "step": 192
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997341598582197e-05,
+ "loss": 1.0578,
+ "step": 193
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997195966158518e-05,
+ "loss": 0.9984,
+ "step": 194
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997046450939122e-05,
+ "loss": 0.9619,
+ "step": 195
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996893052982083e-05,
+ "loss": 1.0214,
+ "step": 196
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996735772346973e-05,
+ "loss": 0.9952,
+ "step": 197
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996574609094887e-05,
+ "loss": 1.0151,
+ "step": 198
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996409563288404e-05,
+ "loss": 0.9638,
+ "step": 199
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996240634991645e-05,
+ "loss": 0.9891,
+ "step": 200
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996067824270204e-05,
+ "loss": 1.0223,
+ "step": 201
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999589113119121e-05,
+ "loss": 1.0309,
+ "step": 202
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995710555823277e-05,
+ "loss": 1.0079,
+ "step": 203
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999552609823655e-05,
+ "loss": 0.9522,
+ "step": 204
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999533775850266e-05,
+ "loss": 0.3102,
+ "step": 205
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995145536694764e-05,
+ "loss": 0.9981,
+ "step": 206
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994949432887512e-05,
+ "loss": 0.9842,
+ "step": 207
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999474944715708e-05,
+ "loss": 0.9885,
+ "step": 208
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994545579581125e-05,
+ "loss": 1.0181,
+ "step": 209
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994337830238836e-05,
+ "loss": 0.9843,
+ "step": 210
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994126199210897e-05,
+ "loss": 0.9757,
+ "step": 211
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999391068657951e-05,
+ "loss": 0.9023,
+ "step": 212
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993691292428364e-05,
+ "loss": 0.9472,
+ "step": 213
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993468016842684e-05,
+ "loss": 0.9836,
+ "step": 214
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999324085990918e-05,
+ "loss": 0.9871,
+ "step": 215
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993009821716076e-05,
+ "loss": 1.0082,
+ "step": 216
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992774902353104e-05,
+ "loss": 0.2744,
+ "step": 217
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999253610191151e-05,
+ "loss": 0.3193,
+ "step": 218
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999229342048404e-05,
+ "loss": 1.0274,
+ "step": 219
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992046858164942e-05,
+ "loss": 0.2843,
+ "step": 220
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999179641504999e-05,
+ "loss": 0.982,
+ "step": 221
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991542091236438e-05,
+ "loss": 0.976,
+ "step": 222
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991283886823075e-05,
+ "loss": 1.0374,
+ "step": 223
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991021801910177e-05,
+ "loss": 1.0289,
+ "step": 224
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999075583659954e-05,
+ "loss": 0.9761,
+ "step": 225
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999048599099446e-05,
+ "loss": 0.2977,
+ "step": 226
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990212265199738e-05,
+ "loss": 0.9407,
+ "step": 227
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.998993465932169e-05,
+ "loss": 1.0007,
+ "step": 228
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989653173468137e-05,
+ "loss": 0.9877,
+ "step": 229
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.99893678077484e-05,
+ "loss": 0.9289,
+ "step": 230
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989078562273313e-05,
+ "loss": 0.9585,
+ "step": 231
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9988785437155222e-05,
+ "loss": 0.9449,
+ "step": 232
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9988488432507963e-05,
+ "loss": 1.0345,
+ "step": 233
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9988187548446895e-05,
+ "loss": 0.965,
+ "step": 234
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998788278508888e-05,
+ "loss": 0.9971,
+ "step": 235
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987574142552274e-05,
+ "loss": 0.9898,
+ "step": 236
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987261620956964e-05,
+ "loss": 0.9984,
+ "step": 237
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9986945220424326e-05,
+ "loss": 1.0083,
+ "step": 238
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998662494107724e-05,
+ "loss": 0.9371,
+ "step": 239
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.99863007830401e-05,
+ "loss": 1.024,
+ "step": 240
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985972746438815e-05,
+ "loss": 1.0131,
+ "step": 241
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985640831400778e-05,
+ "loss": 0.923,
+ "step": 242
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998530503805491e-05,
+ "loss": 0.9833,
+ "step": 243
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984965366531624e-05,
+ "loss": 0.981,
+ "step": 244
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984621816962843e-05,
+ "loss": 0.9922,
+ "step": 245
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984274389482005e-05,
+ "loss": 1.037,
+ "step": 246
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983923084224047e-05,
+ "loss": 0.9879,
+ "step": 247
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983567901325404e-05,
+ "loss": 0.9919,
+ "step": 248
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983208840924028e-05,
+ "loss": 0.9303,
+ "step": 249
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998284590315937e-05,
+ "loss": 0.9406,
+ "step": 250
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982479088172403e-05,
+ "loss": 0.3251,
+ "step": 251
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982108396105584e-05,
+ "loss": 0.9975,
+ "step": 252
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9981733827102884e-05,
+ "loss": 0.9497,
+ "step": 253
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998135538130979e-05,
+ "loss": 0.9562,
+ "step": 254
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998097305887328e-05,
+ "loss": 1.0052,
+ "step": 255
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9980586859941846e-05,
+ "loss": 0.9342,
+ "step": 256
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998019678466548e-05,
+ "loss": 0.9237,
+ "step": 257
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997980283319568e-05,
+ "loss": 0.9744,
+ "step": 258
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979405005685466e-05,
+ "loss": 1.0382,
+ "step": 259
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979003302289336e-05,
+ "loss": 0.9797,
+ "step": 260
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997859772316331e-05,
+ "loss": 0.9955,
+ "step": 261
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9978188268464912e-05,
+ "loss": 0.8648,
+ "step": 262
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997777493835317e-05,
+ "loss": 0.9995,
+ "step": 263
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9977357732988616e-05,
+ "loss": 0.9618,
+ "step": 264
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976936652533288e-05,
+ "loss": 0.9682,
+ "step": 265
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997651169715073e-05,
+ "loss": 0.9777,
+ "step": 266
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976082867005985e-05,
+ "loss": 0.9652,
+ "step": 267
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997565016226561e-05,
+ "loss": 1.0588,
+ "step": 268
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997521358309766e-05,
+ "loss": 0.8892,
+ "step": 269
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.99747731296717e-05,
+ "loss": 0.9918,
+ "step": 270
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9974328802158798e-05,
+ "loss": 0.9888,
+ "step": 271
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997388060073152e-05,
+ "loss": 1.022,
+ "step": 272
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9973428525563948e-05,
+ "loss": 0.9987,
+ "step": 273
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972972576831656e-05,
+ "loss": 0.9734,
+ "step": 274
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972512754711738e-05,
+ "loss": 1.049,
+ "step": 275
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997204905938278e-05,
+ "loss": 0.9298,
+ "step": 276
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9971581491024873e-05,
+ "loss": 1.0159,
+ "step": 277
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997111004981962e-05,
+ "loss": 0.9998,
+ "step": 278
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9970634735950117e-05,
+ "loss": 1.0013,
+ "step": 279
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9970155549600978e-05,
+ "loss": 0.9775,
+ "step": 280
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9969672490958304e-05,
+ "loss": 0.9639,
+ "step": 281
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.996918556020972e-05,
+ "loss": 0.9604,
+ "step": 282
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.996869475754434e-05,
+ "loss": 0.9464,
+ "step": 283
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9968200083152784e-05,
+ "loss": 0.9963,
+ "step": 284
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9967701537227175e-05,
+ "loss": 0.9389,
+ "step": 285
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996719911996115e-05,
+ "loss": 0.9534,
+ "step": 286
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996669283154984e-05,
+ "loss": 0.8961,
+ "step": 287
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996618267218988e-05,
+ "loss": 0.9537,
+ "step": 288
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996566864207941e-05,
+ "loss": 0.9773,
+ "step": 289
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9965150741418072e-05,
+ "loss": 0.9985,
+ "step": 290
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9964628970407018e-05,
+ "loss": 1.0109,
+ "step": 291
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9964103329248892e-05,
+ "loss": 0.9856,
+ "step": 292
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996357381814785e-05,
+ "loss": 0.9531,
+ "step": 293
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996304043730955e-05,
+ "loss": 1.0416,
+ "step": 294
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9962503186941143e-05,
+ "loss": 0.902,
+ "step": 295
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9961962067251298e-05,
+ "loss": 1.0041,
+ "step": 296
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9961417078450177e-05,
+ "loss": 0.3216,
+ "step": 297
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996086822074945e-05,
+ "loss": 0.9695,
+ "step": 298
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9960315494362286e-05,
+ "loss": 1.0055,
+ "step": 299
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9959758899503355e-05,
+ "loss": 0.9274,
+ "step": 300
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995919843638883e-05,
+ "loss": 1.0085,
+ "step": 301
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9958634105236395e-05,
+ "loss": 1.024,
+ "step": 302
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9958065906265228e-05,
+ "loss": 0.9575,
+ "step": 303
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9957493839696013e-05,
+ "loss": 0.931,
+ "step": 304
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9956917905750926e-05,
+ "loss": 1.014,
+ "step": 305
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995633810465366e-05,
+ "loss": 0.9083,
+ "step": 306
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.99557544366294e-05,
+ "loss": 1.0107,
+ "step": 307
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9955166901904838e-05,
+ "loss": 0.9126,
+ "step": 308
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9954575500708164e-05,
+ "loss": 0.9656,
+ "step": 309
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995398023326907e-05,
+ "loss": 0.95,
+ "step": 310
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9953381099818756e-05,
+ "loss": 0.9424,
+ "step": 311
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9952778100589912e-05,
+ "loss": 0.8988,
+ "step": 312
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9952171235816747e-05,
+ "loss": 1.0329,
+ "step": 313
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9951560505734948e-05,
+ "loss": 1.0457,
+ "step": 314
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9950945910581718e-05,
+ "loss": 0.8971,
+ "step": 315
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9950327450595766e-05,
+ "loss": 0.9726,
+ "step": 316
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949705126017286e-05,
+ "loss": 0.9883,
+ "step": 317
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949078937087988e-05,
+ "loss": 0.987,
+ "step": 318
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994844888405107e-05,
+ "loss": 0.9479,
+ "step": 319
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9947814967151246e-05,
+ "loss": 0.9239,
+ "step": 320
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9947177186634716e-05,
+ "loss": 0.9383,
+ "step": 321
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9946535542749187e-05,
+ "loss": 0.2709,
+ "step": 322
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9945890035743866e-05,
+ "loss": 1.053,
+ "step": 323
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9945240665869465e-05,
+ "loss": 0.9982,
+ "step": 324
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9944587433378187e-05,
+ "loss": 1.0055,
+ "step": 325
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994393033852374e-05,
+ "loss": 0.9182,
+ "step": 326
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9943269381561334e-05,
+ "loss": 1.0582,
+ "step": 327
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994260456274768e-05,
+ "loss": 0.9732,
+ "step": 328
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9941935882340976e-05,
+ "loss": 0.9413,
+ "step": 329
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994126334060094e-05,
+ "loss": 0.9014,
+ "step": 330
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994058693778878e-05,
+ "loss": 0.9939,
+ "step": 331
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9939906674167192e-05,
+ "loss": 0.9712,
+ "step": 332
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993922255000039e-05,
+ "loss": 0.9642,
+ "step": 333
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993853456555408e-05,
+ "loss": 0.9423,
+ "step": 334
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9937842721095468e-05,
+ "loss": 1.0095,
+ "step": 335
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9937147016893257e-05,
+ "loss": 0.9853,
+ "step": 336
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9936447453217646e-05,
+ "loss": 0.9414,
+ "step": 337
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9935744030340347e-05,
+ "loss": 0.9975,
+ "step": 338
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9935036748534555e-05,
+ "loss": 1.0131,
+ "step": 339
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993432560807497e-05,
+ "loss": 0.9472,
+ "step": 340
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993361060923779e-05,
+ "loss": 0.9754,
+ "step": 341
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9932891752300717e-05,
+ "loss": 0.9018,
+ "step": 342
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9932169037542947e-05,
+ "loss": 0.9971,
+ "step": 343
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9931442465245164e-05,
+ "loss": 0.9472,
+ "step": 344
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9930712035689576e-05,
+ "loss": 0.9566,
+ "step": 345
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992997774915986e-05,
+ "loss": 0.9609,
+ "step": 346
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992923960594121e-05,
+ "loss": 1.0305,
+ "step": 347
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9928497606320308e-05,
+ "loss": 0.9794,
+ "step": 348
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992775175058535e-05,
+ "loss": 0.9911,
+ "step": 349
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9927002039026002e-05,
+ "loss": 0.8958,
+ "step": 350
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9926248471933453e-05,
+ "loss": 0.998,
+ "step": 351
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9925491049600382e-05,
+ "loss": 0.3126,
+ "step": 352
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9924729772320953e-05,
+ "loss": 0.9474,
+ "step": 353
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9923964640390846e-05,
+ "loss": 1.0056,
+ "step": 354
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9923195654107227e-05,
+ "loss": 0.952,
+ "step": 355
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992242281376876e-05,
+ "loss": 0.9329,
+ "step": 356
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9921646119675606e-05,
+ "loss": 0.9567,
+ "step": 357
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9920865572129426e-05,
+ "loss": 1.0107,
+ "step": 358
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9920081171433377e-05,
+ "loss": 1.0179,
+ "step": 359
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991929291789211e-05,
+ "loss": 1.0085,
+ "step": 360
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9918500811811778e-05,
+ "loss": 0.9612,
+ "step": 361
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991770485350002e-05,
+ "loss": 0.9569,
+ "step": 362
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991690504326597e-05,
+ "loss": 0.9991,
+ "step": 363
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9916101381420285e-05,
+ "loss": 0.9678,
+ "step": 364
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9915293868275083e-05,
+ "loss": 0.9505,
+ "step": 365
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9914482504143996e-05,
+ "loss": 0.9855,
+ "step": 366
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9913667289342147e-05,
+ "loss": 0.9686,
+ "step": 367
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991284822418616e-05,
+ "loss": 0.9202,
+ "step": 368
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9912025308994146e-05,
+ "loss": 0.9758,
+ "step": 369
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9911198544085723e-05,
+ "loss": 1.0149,
+ "step": 370
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991036792978199e-05,
+ "loss": 1.0112,
+ "step": 371
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990953346640555e-05,
+ "loss": 0.9875,
+ "step": 372
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9908695154280496e-05,
+ "loss": 0.9437,
+ "step": 373
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9907852993732425e-05,
+ "loss": 0.9477,
+ "step": 374
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990700698508842e-05,
+ "loss": 0.9369,
+ "step": 375
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990615712867706e-05,
+ "loss": 0.9131,
+ "step": 376
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9905303424828418e-05,
+ "loss": 1.022,
+ "step": 377
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9904445873874068e-05,
+ "loss": 0.9213,
+ "step": 378
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9903584476147066e-05,
+ "loss": 0.9923,
+ "step": 379
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9902719231981975e-05,
+ "loss": 0.9728,
+ "step": 380
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9901850141714843e-05,
+ "loss": 0.9763,
+ "step": 381
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900977205683213e-05,
+ "loss": 0.9372,
+ "step": 382
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900100424226124e-05,
+ "loss": 0.9181,
+ "step": 383
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9899219797684113e-05,
+ "loss": 0.9654,
+ "step": 384
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.98983353263992e-05,
+ "loss": 0.9802,
+ "step": 385
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9897447010714905e-05,
+ "loss": 0.9552,
+ "step": 386
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.989655485097624e-05,
+ "loss": 0.9995,
+ "step": 387
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.989565884752971e-05,
+ "loss": 0.9242,
+ "step": 388
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9894759000723308e-05,
+ "loss": 1.0011,
+ "step": 389
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9893855310906526e-05,
+ "loss": 0.9923,
+ "step": 390
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9892947778430352e-05,
+ "loss": 0.9929,
+ "step": 391
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9892036403647256e-05,
+ "loss": 1.0172,
+ "step": 392
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9891121186911207e-05,
+ "loss": 0.9918,
+ "step": 393
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9890202128577664e-05,
+ "loss": 0.9967,
+ "step": 394
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988927922900358e-05,
+ "loss": 0.9102,
+ "step": 395
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9888352488547397e-05,
+ "loss": 0.9903,
+ "step": 396
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988742190756905e-05,
+ "loss": 0.9354,
+ "step": 397
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9886487486429966e-05,
+ "loss": 1.0028,
+ "step": 398
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9885549225493064e-05,
+ "loss": 0.9658,
+ "step": 399
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9884607125122753e-05,
+ "loss": 0.94,
+ "step": 400
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988366118568494e-05,
+ "loss": 0.9509,
+ "step": 401
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988271140754701e-05,
+ "loss": 0.9642,
+ "step": 402
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9881757791077848e-05,
+ "loss": 0.9849,
+ "step": 403
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9880800336647825e-05,
+ "loss": 0.9676,
+ "step": 404
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987983904462881e-05,
+ "loss": 0.9746,
+ "step": 405
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9878873915394154e-05,
+ "loss": 1.0209,
+ "step": 406
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9877904949318704e-05,
+ "loss": 0.9741,
+ "step": 407
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9876932146778796e-05,
+ "loss": 0.9492,
+ "step": 408
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9875955508152254e-05,
+ "loss": 0.2791,
+ "step": 409
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987497503381839e-05,
+ "loss": 0.2944,
+ "step": 410
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9873990724158014e-05,
+ "loss": 0.9199,
+ "step": 411
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987300257955342e-05,
+ "loss": 0.96,
+ "step": 412
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987201060038839e-05,
+ "loss": 0.9848,
+ "step": 413
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9871014787048197e-05,
+ "loss": 0.985,
+ "step": 414
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9870015139919606e-05,
+ "loss": 0.9003,
+ "step": 415
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9869011659390866e-05,
+ "loss": 1.0227,
+ "step": 416
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9868004345851716e-05,
+ "loss": 0.9831,
+ "step": 417
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9866993199693393e-05,
+ "loss": 0.9358,
+ "step": 418
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.98659782213086e-05,
+ "loss": 0.9757,
+ "step": 419
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986495941109156e-05,
+ "loss": 1.0239,
+ "step": 420
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9863936769437956e-05,
+ "loss": 0.9802,
+ "step": 421
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986291029674497e-05,
+ "loss": 0.9725,
+ "step": 422
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986187999341128e-05,
+ "loss": 1.008,
+ "step": 423
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9860845859837034e-05,
+ "loss": 0.9516,
+ "step": 424
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985980789642388e-05,
+ "loss": 0.8976,
+ "step": 425
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985876610357496e-05,
+ "loss": 0.9699,
+ "step": 426
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9857720481694887e-05,
+ "loss": 0.9561,
+ "step": 427
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9856671031189765e-05,
+ "loss": 0.9651,
+ "step": 428
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.98556177524672e-05,
+ "loss": 0.9456,
+ "step": 429
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9854560645936262e-05,
+ "loss": 1.0112,
+ "step": 430
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9853499712007523e-05,
+ "loss": 0.9635,
+ "step": 431
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9852434951093035e-05,
+ "loss": 0.8902,
+ "step": 432
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985136636360635e-05,
+ "loss": 0.9033,
+ "step": 433
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985029394996248e-05,
+ "loss": 0.2969,
+ "step": 434
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9849217710577945e-05,
+ "loss": 0.3144,
+ "step": 435
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9848137645870745e-05,
+ "loss": 0.9346,
+ "step": 436
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9847053756260363e-05,
+ "loss": 0.9726,
+ "step": 437
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.984596604216777e-05,
+ "loss": 0.9481,
+ "step": 438
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.984487450401542e-05,
+ "loss": 0.9606,
+ "step": 439
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9843779142227258e-05,
+ "loss": 0.8909,
+ "step": 440
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9842679957228706e-05,
+ "loss": 1.0256,
+ "step": 441
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9841576949446675e-05,
+ "loss": 0.9613,
+ "step": 442
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.984047011930956e-05,
+ "loss": 0.9351,
+ "step": 443
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9839359467247243e-05,
+ "loss": 0.9766,
+ "step": 444
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.983824499369109e-05,
+ "loss": 1.0027,
+ "step": 445
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9837126699073948e-05,
+ "loss": 0.9637,
+ "step": 446
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9836004583830146e-05,
+ "loss": 0.9507,
+ "step": 447
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9834878648395507e-05,
+ "loss": 0.9815,
+ "step": 448
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9833748893207326e-05,
+ "loss": 0.9587,
+ "step": 449
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9832615318704388e-05,
+ "loss": 0.8957,
+ "step": 450
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9831477925326962e-05,
+ "loss": 1.0338,
+ "step": 451
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.98303367135168e-05,
+ "loss": 0.9901,
+ "step": 452
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9829191683717133e-05,
+ "loss": 0.9134,
+ "step": 453
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9828042836372677e-05,
+ "loss": 0.9366,
+ "step": 454
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9826890171929634e-05,
+ "loss": 0.9063,
+ "step": 455
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.982573369083568e-05,
+ "loss": 0.936,
+ "step": 456
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9824573393539984e-05,
+ "loss": 0.9738,
+ "step": 457
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.982340928049319e-05,
+ "loss": 0.9805,
+ "step": 458
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9822241352147426e-05,
+ "loss": 0.9481,
+ "step": 459
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9821069608956307e-05,
+ "loss": 0.9224,
+ "step": 460
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9819894051374917e-05,
+ "loss": 0.9435,
+ "step": 461
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981871467985983e-05,
+ "loss": 0.9558,
+ "step": 462
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9817531494869105e-05,
+ "loss": 0.9753,
+ "step": 463
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9816344496862272e-05,
+ "loss": 0.9506,
+ "step": 464
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9815153686300352e-05,
+ "loss": 0.9922,
+ "step": 465
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981395906364584e-05,
+ "loss": 1.007,
+ "step": 466
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9812760629362714e-05,
+ "loss": 0.9239,
+ "step": 467
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981155838391643e-05,
+ "loss": 0.8999,
+ "step": 468
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9810352327773935e-05,
+ "loss": 0.8966,
+ "step": 469
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9809142461403635e-05,
+ "loss": 0.8609,
+ "step": 470
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9807928785275433e-05,
+ "loss": 1.0235,
+ "step": 471
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.980671129986071e-05,
+ "loss": 0.9631,
+ "step": 472
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9805490005632323e-05,
+ "loss": 1.0053,
+ "step": 473
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.98042649030646e-05,
+ "loss": 0.9878,
+ "step": 474
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9803035992633366e-05,
+ "loss": 1.0371,
+ "step": 475
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9801803274815915e-05,
+ "loss": 1.0088,
+ "step": 476
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9800566750091018e-05,
+ "loss": 0.9889,
+ "step": 477
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9799326418938924e-05,
+ "loss": 0.9152,
+ "step": 478
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979808228184137e-05,
+ "loss": 0.9684,
+ "step": 479
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9796834339281557e-05,
+ "loss": 0.9171,
+ "step": 480
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979558259174418e-05,
+ "loss": 0.9473,
+ "step": 481
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9794327039715395e-05,
+ "loss": 0.9039,
+ "step": 482
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979306768368285e-05,
+ "loss": 0.9673,
+ "step": 483
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9791804524135663e-05,
+ "loss": 0.9844,
+ "step": 484
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979053756156443e-05,
+ "loss": 0.9177,
+ "step": 485
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9789266796461222e-05,
+ "loss": 0.9703,
+ "step": 486
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9787992229319594e-05,
+ "loss": 0.9167,
+ "step": 487
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.978671386063457e-05,
+ "loss": 0.9837,
+ "step": 488
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.978543169090265e-05,
+ "loss": 0.3561,
+ "step": 489
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9784145720621827e-05,
+ "loss": 0.9968,
+ "step": 490
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9782855950291542e-05,
+ "loss": 0.963,
+ "step": 491
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.978156238041274e-05,
+ "loss": 0.9685,
+ "step": 492
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9780265011487822e-05,
+ "loss": 0.9381,
+ "step": 493
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9778963844020668e-05,
+ "loss": 0.9407,
+ "step": 494
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977765887851664e-05,
+ "loss": 0.9165,
+ "step": 495
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977635011548257e-05,
+ "loss": 0.3316,
+ "step": 496
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9775037555426772e-05,
+ "loss": 0.9159,
+ "step": 497
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9773721198859024e-05,
+ "loss": 0.9379,
+ "step": 498
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9772401046290584e-05,
+ "loss": 0.9768,
+ "step": 499
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9771077098234187e-05,
+ "loss": 0.9927,
+ "step": 500
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9769749355204034e-05,
+ "loss": 0.9546,
+ "step": 501
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976841781771581e-05,
+ "loss": 0.9958,
+ "step": 502
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9767082486286667e-05,
+ "loss": 0.9466,
+ "step": 503
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9765743361435234e-05,
+ "loss": 0.9159,
+ "step": 504
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9764400443681607e-05,
+ "loss": 0.8767,
+ "step": 505
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9763053733547367e-05,
+ "loss": 1.0446,
+ "step": 506
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976170323155555e-05,
+ "loss": 0.9448,
+ "step": 507
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976034893823069e-05,
+ "loss": 0.9132,
+ "step": 508
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975899085409876e-05,
+ "loss": 0.9193,
+ "step": 509
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9757628979687247e-05,
+ "loss": 0.9665,
+ "step": 510
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975626331552507e-05,
+ "loss": 0.9742,
+ "step": 511
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9754893862142643e-05,
+ "loss": 0.294,
+ "step": 512
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9753520620071846e-05,
+ "loss": 1.0007,
+ "step": 513
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9752143589846027e-05,
+ "loss": 0.9719,
+ "step": 514
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9750762772000014e-05,
+ "loss": 0.9857,
+ "step": 515
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9749378167070097e-05,
+ "loss": 0.9214,
+ "step": 516
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9747989775594044e-05,
+ "loss": 0.9272,
+ "step": 517
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974659759811109e-05,
+ "loss": 0.9527,
+ "step": 518
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9745201635161938e-05,
+ "loss": 0.9597,
+ "step": 519
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9743801887288762e-05,
+ "loss": 0.9602,
+ "step": 520
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9742398355035212e-05,
+ "loss": 0.9721,
+ "step": 521
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9740991038946404e-05,
+ "loss": 0.3203,
+ "step": 522
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.973957993956892e-05,
+ "loss": 0.9899,
+ "step": 523
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9738165057450817e-05,
+ "loss": 0.9541,
+ "step": 524
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9736746393141617e-05,
+ "loss": 0.9986,
+ "step": 525
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9735323947192317e-05,
+ "loss": 1.0268,
+ "step": 526
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9733897720155377e-05,
+ "loss": 0.9229,
+ "step": 527
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9732467712584723e-05,
+ "loss": 0.9152,
+ "step": 528
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.973103392503576e-05,
+ "loss": 1.0084,
+ "step": 529
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9729596358065347e-05,
+ "loss": 0.9658,
+ "step": 530
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9728155012231825e-05,
+ "loss": 0.9228,
+ "step": 531
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9726709888094994e-05,
+ "loss": 0.9909,
+ "step": 532
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.972526098621612e-05,
+ "loss": 0.955,
+ "step": 533
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.972380830715795e-05,
+ "loss": 0.9968,
+ "step": 534
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9722351851484677e-05,
+ "loss": 0.9466,
+ "step": 535
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9720891619761974e-05,
+ "loss": 0.9519,
+ "step": 536
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9719427612556982e-05,
+ "loss": 1.0199,
+ "step": 537
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9717959830438302e-05,
+ "loss": 0.9054,
+ "step": 538
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9716488273976006e-05,
+ "loss": 0.9618,
+ "step": 539
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971501294374162e-05,
+ "loss": 0.9405,
+ "step": 540
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971353384030816e-05,
+ "loss": 0.9531,
+ "step": 541
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9712050964250083e-05,
+ "loss": 0.9163,
+ "step": 542
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9710564316143323e-05,
+ "loss": 0.9231,
+ "step": 543
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9709073896565276e-05,
+ "loss": 0.9702,
+ "step": 544
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9707579706094807e-05,
+ "loss": 0.9434,
+ "step": 545
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.970608174531224e-05,
+ "loss": 0.9116,
+ "step": 546
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.970458001479937e-05,
+ "loss": 0.9492,
+ "step": 547
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9703074515139445e-05,
+ "loss": 0.951,
+ "step": 548
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9701565246917184e-05,
+ "loss": 0.968,
+ "step": 549
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9700052210718775e-05,
+ "loss": 0.9962,
+ "step": 550
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.969853540713186e-05,
+ "loss": 1.0122,
+ "step": 551
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9697014836745552e-05,
+ "loss": 0.9703,
+ "step": 552
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9695490500150418e-05,
+ "loss": 0.9328,
+ "step": 553
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9693962397938495e-05,
+ "loss": 0.97,
+ "step": 554
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9692430530703282e-05,
+ "loss": 0.9872,
+ "step": 555
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9690894899039735e-05,
+ "loss": 1.015,
+ "step": 556
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9689355503544277e-05,
+ "loss": 0.903,
+ "step": 557
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968781234481479e-05,
+ "loss": 0.9144,
+ "step": 558
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9686265423450624e-05,
+ "loss": 0.9404,
+ "step": 559
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9684714740052584e-05,
+ "loss": 0.9781,
+ "step": 560
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9683160295222934e-05,
+ "loss": 0.9543,
+ "step": 561
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9681602089565403e-05,
+ "loss": 0.9393,
+ "step": 562
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968004012368518e-05,
+ "loss": 0.9467,
+ "step": 563
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967847439818892e-05,
+ "loss": 0.8951,
+ "step": 564
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9676904913684725e-05,
+ "loss": 0.9328,
+ "step": 565
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967533167078217e-05,
+ "loss": 0.9344,
+ "step": 566
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9673754670092283e-05,
+ "loss": 0.979,
+ "step": 567
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9672173912227556e-05,
+ "loss": 0.9351,
+ "step": 568
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967058939780193e-05,
+ "loss": 0.9756,
+ "step": 569
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.966900112743082e-05,
+ "loss": 0.886,
+ "step": 570
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.966740910173108e-05,
+ "loss": 0.9123,
+ "step": 571
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9665813321321054e-05,
+ "loss": 0.958,
+ "step": 572
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9664213786820502e-05,
+ "loss": 0.956,
+ "step": 573
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9662610498850684e-05,
+ "loss": 0.928,
+ "step": 574
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9661003458034288e-05,
+ "loss": 0.9454,
+ "step": 575
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965939266499547e-05,
+ "loss": 0.3032,
+ "step": 576
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9657778120359848e-05,
+ "loss": 0.356,
+ "step": 577
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965615982475449e-05,
+ "loss": 0.9396,
+ "step": 578
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9654537778807924e-05,
+ "loss": 0.9366,
+ "step": 579
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9652911983150135e-05,
+ "loss": 0.9995,
+ "step": 580
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965128243841256e-05,
+ "loss": 0.9299,
+ "step": 581
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.96496491452281e-05,
+ "loss": 1.0017,
+ "step": 582
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9648012104231106e-05,
+ "loss": 0.9755,
+ "step": 583
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964637131605738e-05,
+ "loss": 0.9781,
+ "step": 584
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9644726781344197e-05,
+ "loss": 0.9331,
+ "step": 585
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964307850073026e-05,
+ "loss": 0.9641,
+ "step": 586
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964142647485576e-05,
+ "loss": 0.948,
+ "step": 587
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9639770704362305e-05,
+ "loss": 0.9493,
+ "step": 588
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9638111189892994e-05,
+ "loss": 0.9352,
+ "step": 589
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9636447932092354e-05,
+ "loss": 0.9591,
+ "step": 590
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.963478093160638e-05,
+ "loss": 0.359,
+ "step": 591
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9633110189082515e-05,
+ "loss": 0.94,
+ "step": 592
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.963143570516965e-05,
+ "loss": 0.9336,
+ "step": 593
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9629757480518144e-05,
+ "loss": 0.9057,
+ "step": 594
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9628075515779796e-05,
+ "loss": 0.9284,
+ "step": 595
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.962638981160786e-05,
+ "loss": 0.9744,
+ "step": 596
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9624700368657045e-05,
+ "loss": 0.9535,
+ "step": 597
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9623007187583518e-05,
+ "loss": 0.9348,
+ "step": 598
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.962131026904488e-05,
+ "loss": 0.9052,
+ "step": 599
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.96196096137002e-05,
+ "loss": 0.981,
+ "step": 600
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9617905222209998e-05,
+ "loss": 0.9034,
+ "step": 601
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961619709523623e-05,
+ "loss": 0.9294,
+ "step": 602
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9614485233442316e-05,
+ "loss": 0.9432,
+ "step": 603
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961276963749313e-05,
+ "loss": 0.9437,
+ "step": 604
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9611050308054982e-05,
+ "loss": 0.9222,
+ "step": 605
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9609327245795642e-05,
+ "loss": 0.9645,
+ "step": 606
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9607600451384327e-05,
+ "loss": 0.9591,
+ "step": 607
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.960586992549171e-05,
+ "loss": 0.3233,
+ "step": 608
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9604135668789897e-05,
+ "loss": 0.3091,
+ "step": 609
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9602397681952462e-05,
+ "loss": 0.9029,
+ "step": 610
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9600655965654413e-05,
+ "loss": 0.9137,
+ "step": 611
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959891052057222e-05,
+ "loss": 0.9258,
+ "step": 612
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9597161347383783e-05,
+ "loss": 1.0029,
+ "step": 613
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959540844676847e-05,
+ "loss": 0.9326,
+ "step": 614
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9593651819407084e-05,
+ "loss": 0.9146,
+ "step": 615
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959189146598188e-05,
+ "loss": 0.9942,
+ "step": 616
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9590127387176556e-05,
+ "loss": 0.9462,
+ "step": 617
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9588359583676263e-05,
+ "loss": 0.9417,
+ "step": 618
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9586588056167595e-05,
+ "loss": 0.9543,
+ "step": 619
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958481280533859e-05,
+ "loss": 0.9091,
+ "step": 620
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958303383187874e-05,
+ "loss": 0.9614,
+ "step": 621
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9581251136478974e-05,
+ "loss": 0.966,
+ "step": 622
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9579464719831668e-05,
+ "loss": 1.0124,
+ "step": 623
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9577674582630653e-05,
+ "loss": 0.9958,
+ "step": 624
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957588072557119e-05,
+ "loss": 0.9447,
+ "step": 625
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957408314935e-05,
+ "loss": 0.8778,
+ "step": 626
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9572281854665233e-05,
+ "loss": 0.9647,
+ "step": 627
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95704768422165e-05,
+ "loss": 0.9164,
+ "step": 628
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956866811270484e-05,
+ "loss": 0.9681,
+ "step": 629
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9566855666832743e-05,
+ "loss": 0.9696,
+ "step": 630
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9565039505304145e-05,
+ "loss": 0.9038,
+ "step": 631
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956321962882442e-05,
+ "loss": 0.9858,
+ "step": 632
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956139603810039e-05,
+ "loss": 0.9405,
+ "step": 633
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9559568733840317e-05,
+ "loss": 0.9368,
+ "step": 634
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9557737716753898e-05,
+ "loss": 0.9261,
+ "step": 635
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9555902987552283e-05,
+ "loss": 0.952,
+ "step": 636
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9554064546948064e-05,
+ "loss": 0.9369,
+ "step": 637
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9552222395655262e-05,
+ "loss": 0.8745,
+ "step": 638
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9550376534389355e-05,
+ "loss": 0.9598,
+ "step": 639
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9548526963867253e-05,
+ "loss": 0.985,
+ "step": 640
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9546673684807303e-05,
+ "loss": 0.9148,
+ "step": 641
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95448166979293e-05,
+ "loss": 0.9259,
+ "step": 642
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9542956003954477e-05,
+ "loss": 0.9543,
+ "step": 643
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9541091603605508e-05,
+ "loss": 0.8677,
+ "step": 644
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95392234976065e-05,
+ "loss": 0.9009,
+ "step": 645
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9537351686683003e-05,
+ "loss": 0.9323,
+ "step": 646
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9535476171562014e-05,
+ "loss": 0.9528,
+ "step": 647
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9533596952971955e-05,
+ "loss": 0.9111,
+ "step": 648
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9531714031642698e-05,
+ "loss": 0.8794,
+ "step": 649
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9529827408305542e-05,
+ "loss": 0.9228,
+ "step": 650
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9527937083693233e-05,
+ "loss": 0.978,
+ "step": 651
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.952604305853995e-05,
+ "loss": 0.9506,
+ "step": 652
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9524145333581315e-05,
+ "loss": 0.994,
+ "step": 653
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9522243909554375e-05,
+ "loss": 0.969,
+ "step": 654
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.952033878719763e-05,
+ "loss": 1.0084,
+ "step": 655
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9518429967251e-05,
+ "loss": 0.3728,
+ "step": 656
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9516517450455853e-05,
+ "loss": 0.9258,
+ "step": 657
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.951460123755499e-05,
+ "loss": 0.8782,
+ "step": 658
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9512681329292635e-05,
+ "loss": 0.9232,
+ "step": 659
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.951075772641447e-05,
+ "loss": 0.9109,
+ "step": 660
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.95088304296676e-05,
+ "loss": 0.9142,
+ "step": 661
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.950689943980056e-05,
+ "loss": 0.9372,
+ "step": 662
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9504964757563322e-05,
+ "loss": 0.9459,
+ "step": 663
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.95030263837073e-05,
+ "loss": 0.9116,
+ "step": 664
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9501084318985335e-05,
+ "loss": 0.9795,
+ "step": 665
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.94991385641517e-05,
+ "loss": 0.9757,
+ "step": 666
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9497189119962105e-05,
+ "loss": 0.987,
+ "step": 667
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9495235987173693e-05,
+ "loss": 0.8944,
+ "step": 668
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.949327916654504e-05,
+ "loss": 0.985,
+ "step": 669
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.949131865883614e-05,
+ "loss": 0.8853,
+ "step": 670
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948935446480845e-05,
+ "loss": 0.91,
+ "step": 671
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948738658522483e-05,
+ "loss": 0.9634,
+ "step": 672
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9485415020849583e-05,
+ "loss": 0.358,
+ "step": 673
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9483439772448444e-05,
+ "loss": 0.8701,
+ "step": 674
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9481460840788573e-05,
+ "loss": 0.8917,
+ "step": 675
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9479478226638565e-05,
+ "loss": 0.3685,
+ "step": 676
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.947749193076845e-05,
+ "loss": 0.9397,
+ "step": 677
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9475501953949674e-05,
+ "loss": 0.9149,
+ "step": 678
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9473508296955126e-05,
+ "loss": 0.3384,
+ "step": 679
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9471510960559122e-05,
+ "loss": 0.9735,
+ "step": 680
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9469509945537395e-05,
+ "loss": 1.0004,
+ "step": 681
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9467505252667126e-05,
+ "loss": 0.9774,
+ "step": 682
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9465496882726913e-05,
+ "loss": 0.9733,
+ "step": 683
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.946348483649678e-05,
+ "loss": 0.9641,
+ "step": 684
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9461469114758184e-05,
+ "loss": 0.9397,
+ "step": 685
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9459449718294008e-05,
+ "loss": 0.9411,
+ "step": 686
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.945742664788856e-05,
+ "loss": 0.9532,
+ "step": 687
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9455399904327585e-05,
+ "loss": 0.9781,
+ "step": 688
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.945336948839824e-05,
+ "loss": 0.9334,
+ "step": 689
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9451335400889114e-05,
+ "loss": 0.9683,
+ "step": 690
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.944929764259023e-05,
+ "loss": 0.3255,
+ "step": 691
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9447256214293026e-05,
+ "loss": 0.9136,
+ "step": 692
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9445211116790365e-05,
+ "loss": 0.9113,
+ "step": 693
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9443162350876544e-05,
+ "loss": 0.9494,
+ "step": 694
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.944110991734728e-05,
+ "loss": 0.8912,
+ "step": 695
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9439053816999717e-05,
+ "loss": 0.9126,
+ "step": 696
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9436994050632415e-05,
+ "loss": 0.932,
+ "step": 697
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9434930619045367e-05,
+ "loss": 0.9146,
+ "step": 698
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9432863523039986e-05,
+ "loss": 0.9433,
+ "step": 699
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9430792763419105e-05,
+ "loss": 0.949,
+ "step": 700
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.942871834098699e-05,
+ "loss": 0.9449,
+ "step": 701
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9426640256549313e-05,
+ "loss": 0.3057,
+ "step": 702
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9424558510913186e-05,
+ "loss": 0.968,
+ "step": 703
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9422473104887133e-05,
+ "loss": 0.9604,
+ "step": 704
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9420384039281103e-05,
+ "loss": 0.9836,
+ "step": 705
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.941829131490646e-05,
+ "loss": 0.9014,
+ "step": 706
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9416194932576e-05,
+ "loss": 0.9281,
+ "step": 707
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.941409489310393e-05,
+ "loss": 0.8974,
+ "step": 708
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9411991197305878e-05,
+ "loss": 0.9138,
+ "step": 709
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9409883845998905e-05,
+ "loss": 0.955,
+ "step": 710
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9407772840001473e-05,
+ "loss": 0.9367,
+ "step": 711
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9405658180133477e-05,
+ "loss": 0.9613,
+ "step": 712
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9403539867216226e-05,
+ "loss": 0.9882,
+ "step": 713
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9401417902072447e-05,
+ "loss": 0.9232,
+ "step": 714
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9399292285526286e-05,
+ "loss": 1.0081,
+ "step": 715
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939716301840331e-05,
+ "loss": 0.9325,
+ "step": 716
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9395030101530504e-05,
+ "loss": 0.969,
+ "step": 717
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939289353573626e-05,
+ "loss": 0.9948,
+ "step": 718
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9390753321850404e-05,
+ "loss": 0.9315,
+ "step": 719
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.938860946070417e-05,
+ "loss": 0.9175,
+ "step": 720
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.93864619531302e-05,
+ "loss": 0.9635,
+ "step": 721
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9384310799962575e-05,
+ "loss": 0.9699,
+ "step": 722
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9382156002036764e-05,
+ "loss": 0.8872,
+ "step": 723
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9379997560189677e-05,
+ "loss": 0.9445,
+ "step": 724
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937783547525962e-05,
+ "loss": 0.9454,
+ "step": 725
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9375669748086326e-05,
+ "loss": 0.9394,
+ "step": 726
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937350037951094e-05,
+ "loss": 0.9521,
+ "step": 727
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9371327370376018e-05,
+ "loss": 0.9497,
+ "step": 728
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936915072152553e-05,
+ "loss": 0.912,
+ "step": 729
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936697043380486e-05,
+ "loss": 0.3029,
+ "step": 730
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936478650806081e-05,
+ "loss": 0.9439,
+ "step": 731
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936259894514159e-05,
+ "loss": 0.9478,
+ "step": 732
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9360407745896828e-05,
+ "loss": 0.9617,
+ "step": 733
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9358212911177556e-05,
+ "loss": 0.8989,
+ "step": 734
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935601444183622e-05,
+ "loss": 0.9565,
+ "step": 735
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935381233872669e-05,
+ "loss": 0.8481,
+ "step": 736
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935160660270423e-05,
+ "loss": 0.9559,
+ "step": 737
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934939723462552e-05,
+ "loss": 0.907,
+ "step": 738
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9347184235348663e-05,
+ "loss": 0.9452,
+ "step": 739
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9344967605733154e-05,
+ "loss": 0.8931,
+ "step": 740
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934274734663991e-05,
+ "loss": 0.9234,
+ "step": 741
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934052345893125e-05,
+ "loss": 0.9326,
+ "step": 742
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9338295943470915e-05,
+ "loss": 0.9554,
+ "step": 743
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9336064801124034e-05,
+ "loss": 0.9385,
+ "step": 744
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933383003275717e-05,
+ "loss": 0.9185,
+ "step": 745
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933159163923827e-05,
+ "loss": 0.947,
+ "step": 746
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9329349621436708e-05,
+ "loss": 0.9201,
+ "step": 747
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9327103980223255e-05,
+ "loss": 0.3265,
+ "step": 748
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.932485471647009e-05,
+ "loss": 0.3056,
+ "step": 749
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9322601831050804e-05,
+ "loss": 0.9428,
+ "step": 750
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9320345324840396e-05,
+ "loss": 0.9848,
+ "step": 751
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9318085198715257e-05,
+ "loss": 0.9482,
+ "step": 752
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.93158214535532e-05,
+ "loss": 0.9088,
+ "step": 753
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9313554090233436e-05,
+ "loss": 0.9325,
+ "step": 754
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9311283109636586e-05,
+ "loss": 0.9473,
+ "step": 755
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9309008512644668e-05,
+ "loss": 0.9608,
+ "step": 756
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.930673030014111e-05,
+ "loss": 1.0158,
+ "step": 757
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.930444847301075e-05,
+ "loss": 0.953,
+ "step": 758
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9302163032139813e-05,
+ "loss": 0.9279,
+ "step": 759
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9299873978415947e-05,
+ "loss": 0.9526,
+ "step": 760
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9297581312728187e-05,
+ "loss": 0.9076,
+ "step": 761
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929528503596698e-05,
+ "loss": 0.9154,
+ "step": 762
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929298514902418e-05,
+ "loss": 0.9768,
+ "step": 763
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929068165279303e-05,
+ "loss": 0.9425,
+ "step": 764
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928837454816818e-05,
+ "loss": 0.9292,
+ "step": 765
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9286063836045687e-05,
+ "loss": 0.9183,
+ "step": 766
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9283749517323e-05,
+ "loss": 0.9553,
+ "step": 767
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928143159289898e-05,
+ "loss": 0.9285,
+ "step": 768
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927911006367388e-05,
+ "loss": 0.8953,
+ "step": 769
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927678493054935e-05,
+ "loss": 1.01,
+ "step": 770
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9274456194428454e-05,
+ "loss": 0.8866,
+ "step": 771
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9272123856215643e-05,
+ "loss": 0.948,
+ "step": 772
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9269787916816764e-05,
+ "loss": 0.9182,
+ "step": 773
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9267448377139074e-05,
+ "loss": 0.9536,
+ "step": 774
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9265105238091227e-05,
+ "loss": 0.9415,
+ "step": 775
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9262758500583265e-05,
+ "loss": 0.8991,
+ "step": 776
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9260408165526638e-05,
+ "loss": 0.9534,
+ "step": 777
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9258054233834184e-05,
+ "loss": 0.8794,
+ "step": 778
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9255696706420147e-05,
+ "loss": 0.9673,
+ "step": 779
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9253335584200165e-05,
+ "loss": 0.8788,
+ "step": 780
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9250970868091268e-05,
+ "loss": 0.9376,
+ "step": 781
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.924860255901188e-05,
+ "loss": 0.8811,
+ "step": 782
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9246230657881834e-05,
+ "loss": 0.9476,
+ "step": 783
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9243855165622345e-05,
+ "loss": 0.9362,
+ "step": 784
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9241476083156026e-05,
+ "loss": 0.9155,
+ "step": 785
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9239093411406885e-05,
+ "loss": 0.9678,
+ "step": 786
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9236707151300326e-05,
+ "loss": 0.8807,
+ "step": 787
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9234317303763145e-05,
+ "loss": 0.8831,
+ "step": 788
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9231923869723528e-05,
+ "loss": 0.3529,
+ "step": 789
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.922952685011106e-05,
+ "loss": 0.964,
+ "step": 790
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9227126245856716e-05,
+ "loss": 0.3712,
+ "step": 791
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.922472205789286e-05,
+ "loss": 0.9838,
+ "step": 792
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9222314287153255e-05,
+ "loss": 0.299,
+ "step": 793
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9219902934573048e-05,
+ "loss": 0.9794,
+ "step": 794
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9217488001088784e-05,
+ "loss": 0.9706,
+ "step": 795
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9215069487638396e-05,
+ "loss": 0.991,
+ "step": 796
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.92126473951612e-05,
+ "loss": 0.9366,
+ "step": 797
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.921022172459791e-05,
+ "loss": 0.9889,
+ "step": 798
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.920779247689064e-05,
+ "loss": 0.9183,
+ "step": 799
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9205359652982868e-05,
+ "loss": 0.9689,
+ "step": 800
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9202923253819482e-05,
+ "loss": 0.9095,
+ "step": 801
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.920048328034675e-05,
+ "loss": 0.8812,
+ "step": 802
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9198039733512326e-05,
+ "loss": 0.9137,
+ "step": 803
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9195592614265262e-05,
+ "loss": 0.975,
+ "step": 804
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9193141923555984e-05,
+ "loss": 0.8885,
+ "step": 805
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.919068766233632e-05,
+ "loss": 0.854,
+ "step": 806
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9188229831559468e-05,
+ "loss": 0.9822,
+ "step": 807
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9185768432180026e-05,
+ "loss": 0.912,
+ "step": 808
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9183303465153972e-05,
+ "loss": 0.9205,
+ "step": 809
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9180834931438673e-05,
+ "loss": 0.9676,
+ "step": 810
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917836283199288e-05,
+ "loss": 0.8845,
+ "step": 811
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917588716777672e-05,
+ "loss": 0.9723,
+ "step": 812
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917340793975172e-05,
+ "loss": 0.9122,
+ "step": 813
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917092514888078e-05,
+ "loss": 0.9583,
+ "step": 814
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9168438796128193e-05,
+ "loss": 0.9079,
+ "step": 815
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9165948882459623e-05,
+ "loss": 0.8845,
+ "step": 816
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9163455408842123e-05,
+ "loss": 0.9921,
+ "step": 817
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9160958376244138e-05,
+ "loss": 0.9166,
+ "step": 818
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9158457785635478e-05,
+ "loss": 0.9785,
+ "step": 819
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915595363798735e-05,
+ "loss": 0.8986,
+ "step": 820
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915344593427233e-05,
+ "loss": 0.9226,
+ "step": 821
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9150934675464384e-05,
+ "loss": 0.8712,
+ "step": 822
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9148419862538858e-05,
+ "loss": 0.9654,
+ "step": 823
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9145901496472474e-05,
+ "loss": 0.9771,
+ "step": 824
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9143379578243335e-05,
+ "loss": 0.9436,
+ "step": 825
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.914085410883093e-05,
+ "loss": 0.9894,
+ "step": 826
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9138325089216118e-05,
+ "loss": 0.9068,
+ "step": 827
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.913579252038114e-05,
+ "loss": 0.9422,
+ "step": 828
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9133256403309627e-05,
+ "loss": 0.9182,
+ "step": 829
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.913071673898656e-05,
+ "loss": 0.9261,
+ "step": 830
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.912817352839833e-05,
+ "loss": 0.8802,
+ "step": 831
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9125626772532683e-05,
+ "loss": 0.877,
+ "step": 832
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9123076472378753e-05,
+ "loss": 0.9579,
+ "step": 833
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9120522628927047e-05,
+ "loss": 0.9898,
+ "step": 834
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9117965243169444e-05,
+ "loss": 0.9051,
+ "step": 835
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9115404316099212e-05,
+ "loss": 0.9402,
+ "step": 836
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9112839848710978e-05,
+ "loss": 0.9451,
+ "step": 837
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9110271842000755e-05,
+ "loss": 0.3687,
+ "step": 838
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9107700296965926e-05,
+ "loss": 0.9534,
+ "step": 839
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.910512521460525e-05,
+ "loss": 0.9271,
+ "step": 840
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9102546595918857e-05,
+ "loss": 1.0075,
+ "step": 841
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9099964441908258e-05,
+ "loss": 0.9131,
+ "step": 842
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9097378753576327e-05,
+ "loss": 0.9214,
+ "step": 843
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9094789531927315e-05,
+ "loss": 0.9203,
+ "step": 844
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.909219677796685e-05,
+ "loss": 0.9698,
+ "step": 845
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9089600492701926e-05,
+ "loss": 0.345,
+ "step": 846
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.908700067714091e-05,
+ "loss": 0.3491,
+ "step": 847
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9084397332293537e-05,
+ "loss": 0.9524,
+ "step": 848
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9081790459170926e-05,
+ "loss": 0.9527,
+ "step": 849
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9079180058785547e-05,
+ "loss": 0.9158,
+ "step": 850
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9076566132151255e-05,
+ "loss": 0.9576,
+ "step": 851
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.907394868028326e-05,
+ "loss": 0.9538,
+ "step": 852
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9071327704198163e-05,
+ "loss": 0.9251,
+ "step": 853
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.906870320491391e-05,
+ "loss": 0.8861,
+ "step": 854
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9066075183449835e-05,
+ "loss": 0.9199,
+ "step": 855
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9063443640826624e-05,
+ "loss": 0.8862,
+ "step": 856
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.906080857806634e-05,
+ "loss": 0.9411,
+ "step": 857
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.905816999619242e-05,
+ "loss": 0.955,
+ "step": 858
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9055527896229642e-05,
+ "loss": 0.9152,
+ "step": 859
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.905288227920418e-05,
+ "loss": 0.8776,
+ "step": 860
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9050233146143554e-05,
+ "loss": 0.9462,
+ "step": 861
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9047580498076663e-05,
+ "loss": 1.0074,
+ "step": 862
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904492433603376e-05,
+ "loss": 0.8786,
+ "step": 863
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904226466104647e-05,
+ "loss": 0.9435,
+ "step": 864
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.903960147414778e-05,
+ "loss": 0.9668,
+ "step": 865
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.903693477637204e-05,
+ "loss": 0.3139,
+ "step": 866
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9034264568754967e-05,
+ "loss": 0.9452,
+ "step": 867
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9031590852333637e-05,
+ "loss": 0.9357,
+ "step": 868
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9028913628146487e-05,
+ "loss": 0.9148,
+ "step": 869
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.902623289723333e-05,
+ "loss": 0.9018,
+ "step": 870
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.902354866063532e-05,
+ "loss": 0.9095,
+ "step": 871
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9020860919394992e-05,
+ "loss": 0.8821,
+ "step": 872
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9018169674556228e-05,
+ "loss": 0.8456,
+ "step": 873
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.901547492716428e-05,
+ "loss": 0.9265,
+ "step": 874
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9012776678265756e-05,
+ "loss": 0.9095,
+ "step": 875
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9010074928908624e-05,
+ "loss": 0.9256,
+ "step": 876
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900736968014221e-05,
+ "loss": 0.9216,
+ "step": 877
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9004660933017208e-05,
+ "loss": 0.9195,
+ "step": 878
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900194868858566e-05,
+ "loss": 0.9252,
+ "step": 879
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8999232947900968e-05,
+ "loss": 0.916,
+ "step": 880
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89965137120179e-05,
+ "loss": 0.3312,
+ "step": 881
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.899379098199257e-05,
+ "loss": 0.8992,
+ "step": 882
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.899106475888246e-05,
+ "loss": 0.9512,
+ "step": 883
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89883350437464e-05,
+ "loss": 0.9483,
+ "step": 884
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8985601837644586e-05,
+ "loss": 0.954,
+ "step": 885
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8982865141638557e-05,
+ "loss": 0.9777,
+ "step": 886
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8980124956791216e-05,
+ "loss": 0.9181,
+ "step": 887
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8977381284166818e-05,
+ "loss": 0.9654,
+ "step": 888
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897463412483098e-05,
+ "loss": 0.8833,
+ "step": 889
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897188347985066e-05,
+ "loss": 0.9304,
+ "step": 890
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896912935029418e-05,
+ "loss": 0.9227,
+ "step": 891
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896637173723121e-05,
+ "loss": 0.9524,
+ "step": 892
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8963610641732777e-05,
+ "loss": 0.9938,
+ "step": 893
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8960846064871257e-05,
+ "loss": 0.8756,
+ "step": 894
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8958078007720387e-05,
+ "loss": 0.8807,
+ "step": 895
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.895530647135524e-05,
+ "loss": 0.9026,
+ "step": 896
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8952531456852248e-05,
+ "loss": 0.3359,
+ "step": 897
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8949752965289197e-05,
+ "loss": 0.8913,
+ "step": 898
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.894697099774523e-05,
+ "loss": 0.8688,
+ "step": 899
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.894418555530082e-05,
+ "loss": 0.9398,
+ "step": 900
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89413966390378e-05,
+ "loss": 0.9413,
+ "step": 901
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8938604250039362e-05,
+ "loss": 0.9731,
+ "step": 902
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8935808389390032e-05,
+ "loss": 0.9106,
+ "step": 903
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.893300905817569e-05,
+ "loss": 0.8899,
+ "step": 904
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8930206257483566e-05,
+ "loss": 0.983,
+ "step": 905
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8927399988402233e-05,
+ "loss": 0.9512,
+ "step": 906
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8924590252021614e-05,
+ "loss": 0.9165,
+ "step": 907
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8921777049432985e-05,
+ "loss": 0.35,
+ "step": 908
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8918960381728947e-05,
+ "loss": 0.9625,
+ "step": 909
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8916140250003475e-05,
+ "loss": 0.905,
+ "step": 910
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.891331665535187e-05,
+ "loss": 0.9542,
+ "step": 911
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8910489598870784e-05,
+ "loss": 0.9589,
+ "step": 912
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8907659081658214e-05,
+ "loss": 0.9409,
+ "step": 913
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8904825104813497e-05,
+ "loss": 0.89,
+ "step": 914
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8901987669437322e-05,
+ "loss": 0.944,
+ "step": 915
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.889914677663171e-05,
+ "loss": 0.9217,
+ "step": 916
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8896302427500042e-05,
+ "loss": 0.8912,
+ "step": 917
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8893454623147017e-05,
+ "loss": 0.9592,
+ "step": 918
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.88906033646787e-05,
+ "loss": 0.9194,
+ "step": 919
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8887748653202478e-05,
+ "loss": 0.9415,
+ "step": 920
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8884890489827097e-05,
+ "loss": 0.8378,
+ "step": 921
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8882028875662627e-05,
+ "loss": 0.8577,
+ "step": 922
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8879163811820493e-05,
+ "loss": 0.9159,
+ "step": 923
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8876295299413445e-05,
+ "loss": 0.8698,
+ "step": 924
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8873423339555584e-05,
+ "loss": 0.9418,
+ "step": 925
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8870547933362352e-05,
+ "loss": 0.3612,
+ "step": 926
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.886766908195051e-05,
+ "loss": 0.9417,
+ "step": 927
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8864786786438187e-05,
+ "loss": 0.9079,
+ "step": 928
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.886190104794482e-05,
+ "loss": 0.9634,
+ "step": 929
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8859011867591203e-05,
+ "loss": 0.9203,
+ "step": 930
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885611924649946e-05,
+ "loss": 0.9226,
+ "step": 931
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885322318579305e-05,
+ "loss": 0.9712,
+ "step": 932
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8850323686596766e-05,
+ "loss": 0.9656,
+ "step": 933
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8847420750036748e-05,
+ "loss": 0.8569,
+ "step": 934
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.884451437724046e-05,
+ "loss": 0.9105,
+ "step": 935
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8841604569336702e-05,
+ "loss": 0.9168,
+ "step": 936
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.883869132745561e-05,
+ "loss": 0.8854,
+ "step": 937
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.883577465272866e-05,
+ "loss": 0.8706,
+ "step": 938
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8832854546288642e-05,
+ "loss": 0.9097,
+ "step": 939
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8829931009269707e-05,
+ "loss": 0.9096,
+ "step": 940
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882700404280731e-05,
+ "loss": 0.932,
+ "step": 941
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8824073648038258e-05,
+ "loss": 0.9086,
+ "step": 942
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882113982610068e-05,
+ "loss": 0.9184,
+ "step": 943
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.881820257813404e-05,
+ "loss": 0.9778,
+ "step": 944
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8815261905279133e-05,
+ "loss": 0.9168,
+ "step": 945
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8812317808678075e-05,
+ "loss": 0.3528,
+ "step": 946
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8809370289474327e-05,
+ "loss": 0.9731,
+ "step": 947
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8806419348812673e-05,
+ "loss": 0.9179,
+ "step": 948
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8803464987839217e-05,
+ "loss": 0.9582,
+ "step": 949
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.88005072077014e-05,
+ "loss": 0.947,
+ "step": 950
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8797546009547996e-05,
+ "loss": 0.958,
+ "step": 951
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.879458139452909e-05,
+ "loss": 0.9568,
+ "step": 952
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8791613363796118e-05,
+ "loss": 0.9292,
+ "step": 953
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8788641918501817e-05,
+ "loss": 0.8576,
+ "step": 954
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8785667059800264e-05,
+ "loss": 0.8156,
+ "step": 955
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8782688788846865e-05,
+ "loss": 0.8837,
+ "step": 956
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877970710679834e-05,
+ "loss": 0.9345,
+ "step": 957
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877672201481275e-05,
+ "loss": 0.8941,
+ "step": 958
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877373351404946e-05,
+ "loss": 0.9011,
+ "step": 959
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8770741605669173e-05,
+ "loss": 0.9253,
+ "step": 960
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.876774629083391e-05,
+ "loss": 0.9121,
+ "step": 961
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8764747570707017e-05,
+ "loss": 0.937,
+ "step": 962
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8761745446453167e-05,
+ "loss": 0.9532,
+ "step": 963
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.875873991923835e-05,
+ "loss": 0.9758,
+ "step": 964
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.875573099022987e-05,
+ "loss": 0.9477,
+ "step": 965
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8752718660596367e-05,
+ "loss": 0.9285,
+ "step": 966
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8749702931507797e-05,
+ "loss": 0.904,
+ "step": 967
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.874668380413543e-05,
+ "loss": 0.8815,
+ "step": 968
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8743661279651856e-05,
+ "loss": 0.9679,
+ "step": 969
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8740635359231e-05,
+ "loss": 0.9621,
+ "step": 970
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8737606044048086e-05,
+ "loss": 0.8696,
+ "step": 971
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.873457333527967e-05,
+ "loss": 0.9741,
+ "step": 972
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.873153723410362e-05,
+ "loss": 0.3411,
+ "step": 973
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8728497741699115e-05,
+ "loss": 0.9085,
+ "step": 974
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.872545485924667e-05,
+ "loss": 0.9056,
+ "step": 975
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8722408587928104e-05,
+ "loss": 1.0055,
+ "step": 976
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8719358928926546e-05,
+ "loss": 0.9415,
+ "step": 977
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8716305883426456e-05,
+ "loss": 0.8918,
+ "step": 978
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.87132494526136e-05,
+ "loss": 0.8949,
+ "step": 979
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8710189637675055e-05,
+ "loss": 0.8764,
+ "step": 980
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8707126439799225e-05,
+ "loss": 0.8984,
+ "step": 981
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.870405986017582e-05,
+ "loss": 0.8995,
+ "step": 982
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8700989899995857e-05,
+ "loss": 0.9052,
+ "step": 983
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8697916560451682e-05,
+ "loss": 0.923,
+ "step": 984
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.869483984273694e-05,
+ "loss": 0.8883,
+ "step": 985
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8691759748046594e-05,
+ "loss": 0.952,
+ "step": 986
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8688676277576916e-05,
+ "loss": 0.971,
+ "step": 987
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.868558943252549e-05,
+ "loss": 0.908,
+ "step": 988
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.868249921409122e-05,
+ "loss": 0.9385,
+ "step": 989
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8679405623474294e-05,
+ "loss": 0.9478,
+ "step": 990
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8676308661876242e-05,
+ "loss": 0.9882,
+ "step": 991
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8673208330499884e-05,
+ "loss": 0.898,
+ "step": 992
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8670104630549348e-05,
+ "loss": 0.9524,
+ "step": 993
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866699756323008e-05,
+ "loss": 0.8181,
+ "step": 994
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866388712974883e-05,
+ "loss": 0.9904,
+ "step": 995
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866077333131365e-05,
+ "loss": 0.8949,
+ "step": 996
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8657656169133908e-05,
+ "loss": 0.9498,
+ "step": 997
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8654535644420276e-05,
+ "loss": 0.8722,
+ "step": 998
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8651411758384718e-05,
+ "loss": 0.8998,
+ "step": 999
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8648284512240527e-05,
+ "loss": 0.8935,
+ "step": 1000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8645153907202285e-05,
+ "loss": 0.8978,
+ "step": 1001
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8642019944485884e-05,
+ "loss": 0.331,
+ "step": 1002
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.863888262530852e-05,
+ "loss": 0.9529,
+ "step": 1003
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.863574195088869e-05,
+ "loss": 0.8517,
+ "step": 1004
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8632597922446195e-05,
+ "loss": 0.9371,
+ "step": 1005
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8629450541202142e-05,
+ "loss": 0.971,
+ "step": 1006
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8626299808378933e-05,
+ "loss": 0.9409,
+ "step": 1007
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.862314572520028e-05,
+ "loss": 0.9195,
+ "step": 1008
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.861998829289119e-05,
+ "loss": 1.0161,
+ "step": 1009
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.861682751267798e-05,
+ "loss": 0.8846,
+ "step": 1010
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.861366338578825e-05,
+ "loss": 0.9276,
+ "step": 1011
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8610495913450922e-05,
+ "loss": 0.9159,
+ "step": 1012
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8607325096896197e-05,
+ "loss": 0.3851,
+ "step": 1013
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8604150937355588e-05,
+ "loss": 0.8811,
+ "step": 1014
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.86009734360619e-05,
+ "loss": 0.9636,
+ "step": 1015
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8597792594249237e-05,
+ "loss": 0.9326,
+ "step": 1016
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8594608413153e-05,
+ "loss": 0.9532,
+ "step": 1017
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8591420894009897e-05,
+ "loss": 0.9115,
+ "step": 1018
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8588230038057913e-05,
+ "loss": 0.9345,
+ "step": 1019
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8585035846536347e-05,
+ "loss": 0.9732,
+ "step": 1020
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8581838320685782e-05,
+ "loss": 0.96,
+ "step": 1021
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8578637461748105e-05,
+ "loss": 0.946,
+ "step": 1022
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.857543327096649e-05,
+ "loss": 0.9461,
+ "step": 1023
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.85722257495854e-05,
+ "loss": 0.9272,
+ "step": 1024
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.856901489885061e-05,
+ "loss": 0.9934,
+ "step": 1025
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.856580072000918e-05,
+ "loss": 0.8986,
+ "step": 1026
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8562583214309447e-05,
+ "loss": 0.9382,
+ "step": 1027
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.855936238300106e-05,
+ "loss": 0.9913,
+ "step": 1028
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8556138227334957e-05,
+ "loss": 0.9356,
+ "step": 1029
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.855291074856336e-05,
+ "loss": 0.9394,
+ "step": 1030
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8549679947939778e-05,
+ "loss": 0.9326,
+ "step": 1031
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8546445826719023e-05,
+ "loss": 0.9758,
+ "step": 1032
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8543208386157195e-05,
+ "loss": 0.9908,
+ "step": 1033
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.853996762751167e-05,
+ "loss": 0.9939,
+ "step": 1034
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8536723552041124e-05,
+ "loss": 0.9334,
+ "step": 1035
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.853347616100552e-05,
+ "loss": 0.8662,
+ "step": 1036
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8530225455666103e-05,
+ "loss": 0.874,
+ "step": 1037
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8526971437285416e-05,
+ "loss": 0.9607,
+ "step": 1038
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8523714107127278e-05,
+ "loss": 0.9436,
+ "step": 1039
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8520453466456797e-05,
+ "loss": 0.9564,
+ "step": 1040
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8517189516540376e-05,
+ "loss": 0.9919,
+ "step": 1041
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8513922258645687e-05,
+ "loss": 0.9457,
+ "step": 1042
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8510651694041702e-05,
+ "loss": 0.9113,
+ "step": 1043
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8507377823998664e-05,
+ "loss": 0.889,
+ "step": 1044
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.850410064978811e-05,
+ "loss": 0.8957,
+ "step": 1045
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8500820172682858e-05,
+ "loss": 0.9713,
+ "step": 1046
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8497536393957005e-05,
+ "loss": 0.3168,
+ "step": 1047
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8494249314885932e-05,
+ "loss": 0.8657,
+ "step": 1048
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8490958936746304e-05,
+ "loss": 0.9021,
+ "step": 1049
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.848766526081607e-05,
+ "loss": 0.8838,
+ "step": 1050
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8484368288374452e-05,
+ "loss": 0.9475,
+ "step": 1051
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8481068020701954e-05,
+ "loss": 0.9845,
+ "step": 1052
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8477764459080364e-05,
+ "loss": 0.8957,
+ "step": 1053
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8474457604792746e-05,
+ "loss": 0.3215,
+ "step": 1054
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8471147459123447e-05,
+ "loss": 0.9442,
+ "step": 1055
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8467834023358088e-05,
+ "loss": 0.8866,
+ "step": 1056
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.846451729878357e-05,
+ "loss": 0.9572,
+ "step": 1057
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.846119728668807e-05,
+ "loss": 0.9419,
+ "step": 1058
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.845787398836104e-05,
+ "loss": 0.9387,
+ "step": 1059
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8454547405093212e-05,
+ "loss": 0.9377,
+ "step": 1060
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8451217538176597e-05,
+ "loss": 0.9553,
+ "step": 1061
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.844788438890447e-05,
+ "loss": 0.986,
+ "step": 1062
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8444547958571396e-05,
+ "loss": 0.3361,
+ "step": 1063
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.84412082484732e-05,
+ "loss": 0.3385,
+ "step": 1064
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8437865259906987e-05,
+ "loss": 0.9415,
+ "step": 1065
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8434518994171136e-05,
+ "loss": 0.9397,
+ "step": 1066
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.84311694525653e-05,
+ "loss": 0.9054,
+ "step": 1067
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.84278166363904e-05,
+ "loss": 0.8955,
+ "step": 1068
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8424460546948632e-05,
+ "loss": 0.9017,
+ "step": 1069
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8421101185543463e-05,
+ "loss": 0.92,
+ "step": 1070
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841773855347963e-05,
+ "loss": 0.9055,
+ "step": 1071
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841437265206314e-05,
+ "loss": 0.8364,
+ "step": 1072
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841100348260127e-05,
+ "loss": 0.9515,
+ "step": 1073
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.840763104640257e-05,
+ "loss": 0.9436,
+ "step": 1074
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8404255344776853e-05,
+ "loss": 0.9395,
+ "step": 1075
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.84008763790352e-05,
+ "loss": 0.8926,
+ "step": 1076
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8397494150489963e-05,
+ "loss": 0.9672,
+ "step": 1077
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8394108660454766e-05,
+ "loss": 0.865,
+ "step": 1078
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8390719910244487e-05,
+ "loss": 0.9504,
+ "step": 1079
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8387327901175286e-05,
+ "loss": 0.941,
+ "step": 1080
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838393263456457e-05,
+ "loss": 0.9799,
+ "step": 1081
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838053411173103e-05,
+ "loss": 0.9256,
+ "step": 1082
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8377132333994606e-05,
+ "loss": 0.9193,
+ "step": 1083
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.837372730267652e-05,
+ "loss": 0.8726,
+ "step": 1084
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8370319019099236e-05,
+ "loss": 0.9096,
+ "step": 1085
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8366907484586497e-05,
+ "loss": 0.9407,
+ "step": 1086
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.83634927004633e-05,
+ "loss": 0.9167,
+ "step": 1087
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8360074668055915e-05,
+ "loss": 0.9128,
+ "step": 1088
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8356653388691857e-05,
+ "loss": 0.8422,
+ "step": 1089
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8353228863699922e-05,
+ "loss": 0.908,
+ "step": 1090
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8349801094410148e-05,
+ "loss": 0.8724,
+ "step": 1091
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8346370082153843e-05,
+ "loss": 0.9003,
+ "step": 1092
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8342935828263574e-05,
+ "loss": 0.98,
+ "step": 1093
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8339498334073166e-05,
+ "loss": 0.8614,
+ "step": 1094
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.83360576009177e-05,
+ "loss": 0.911,
+ "step": 1095
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.833261363013352e-05,
+ "loss": 0.8732,
+ "step": 1096
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.832916642305822e-05,
+ "loss": 0.8753,
+ "step": 1097
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.832571598103066e-05,
+ "loss": 0.9483,
+ "step": 1098
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8322262305390948e-05,
+ "loss": 0.974,
+ "step": 1099
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8318805397480455e-05,
+ "loss": 0.9167,
+ "step": 1100
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8315345258641802e-05,
+ "loss": 0.9712,
+ "step": 1101
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8311881890218873e-05,
+ "loss": 0.9197,
+ "step": 1102
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.830841529355679e-05,
+ "loss": 0.9173,
+ "step": 1103
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8304945470001948e-05,
+ "loss": 0.9408,
+ "step": 1104
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8301472420901985e-05,
+ "loss": 0.9391,
+ "step": 1105
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8297996147605787e-05,
+ "loss": 0.9482,
+ "step": 1106
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.829451665146351e-05,
+ "loss": 0.8962,
+ "step": 1107
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8291033933826535e-05,
+ "loss": 0.9761,
+ "step": 1108
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8287547996047523e-05,
+ "loss": 0.933,
+ "step": 1109
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8284058839480363e-05,
+ "loss": 0.8762,
+ "step": 1110
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8280566465480206e-05,
+ "loss": 0.9217,
+ "step": 1111
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8277070875403455e-05,
+ "loss": 0.3614,
+ "step": 1112
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8273572070607756e-05,
+ "loss": 0.9359,
+ "step": 1113
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8270070052451995e-05,
+ "loss": 0.9401,
+ "step": 1114
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8266564822296323e-05,
+ "loss": 0.9186,
+ "step": 1115
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.826305638150213e-05,
+ "loss": 0.321,
+ "step": 1116
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.825954473143205e-05,
+ "loss": 0.3378,
+ "step": 1117
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8256029873449976e-05,
+ "loss": 0.8492,
+ "step": 1118
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.825251180892103e-05,
+ "loss": 0.3327,
+ "step": 1119
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8248990539211596e-05,
+ "loss": 0.8799,
+ "step": 1120
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8245466065689282e-05,
+ "loss": 0.9734,
+ "step": 1121
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.824193838972297e-05,
+ "loss": 0.9092,
+ "step": 1122
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.823840751268275e-05,
+ "loss": 0.8317,
+ "step": 1123
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8234873435939987e-05,
+ "loss": 0.8746,
+ "step": 1124
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8231336160867275e-05,
+ "loss": 0.9396,
+ "step": 1125
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8227795688838446e-05,
+ "loss": 0.9045,
+ "step": 1126
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.822425202122858e-05,
+ "loss": 0.9036,
+ "step": 1127
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8220705159413996e-05,
+ "loss": 0.8528,
+ "step": 1128
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8217155104772256e-05,
+ "loss": 0.9213,
+ "step": 1129
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8213601858682158e-05,
+ "loss": 0.4092,
+ "step": 1130
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8210045422523744e-05,
+ "loss": 0.9155,
+ "step": 1131
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8206485797678294e-05,
+ "loss": 0.3397,
+ "step": 1132
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.820292298552832e-05,
+ "loss": 0.9601,
+ "step": 1133
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.819935698745759e-05,
+ "loss": 0.8789,
+ "step": 1134
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8195787804851076e-05,
+ "loss": 0.9189,
+ "step": 1135
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8192215439095025e-05,
+ "loss": 0.8967,
+ "step": 1136
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8188639891576893e-05,
+ "loss": 0.8964,
+ "step": 1137
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8185061163685386e-05,
+ "loss": 0.8626,
+ "step": 1138
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.818147925681044e-05,
+ "loss": 0.9607,
+ "step": 1139
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8177894172343227e-05,
+ "loss": 0.9404,
+ "step": 1140
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.817430591167615e-05,
+ "loss": 0.9238,
+ "step": 1141
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8170714476202848e-05,
+ "loss": 0.9255,
+ "step": 1142
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8167119867318197e-05,
+ "loss": 0.3475,
+ "step": 1143
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.81635220864183e-05,
+ "loss": 0.8938,
+ "step": 1144
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8159921134900486e-05,
+ "loss": 0.9342,
+ "step": 1145
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8156317014163337e-05,
+ "loss": 0.9245,
+ "step": 1146
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8152709725606642e-05,
+ "loss": 0.9467,
+ "step": 1147
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8149099270631434e-05,
+ "loss": 0.8735,
+ "step": 1148
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8145485650639973e-05,
+ "loss": 0.3534,
+ "step": 1149
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8141868867035745e-05,
+ "loss": 0.9545,
+ "step": 1150
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8138248921223465e-05,
+ "loss": 0.8612,
+ "step": 1151
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8134625814609084e-05,
+ "loss": 0.3531,
+ "step": 1152
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8130999548599767e-05,
+ "loss": 0.9884,
+ "step": 1153
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8127370124603927e-05,
+ "loss": 0.938,
+ "step": 1154
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8123737544031178e-05,
+ "loss": 0.9063,
+ "step": 1155
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8120101808292373e-05,
+ "loss": 0.9163,
+ "step": 1156
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.81164629187996e-05,
+ "loss": 0.9941,
+ "step": 1157
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811282087696615e-05,
+ "loss": 0.8835,
+ "step": 1158
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8109175684206558e-05,
+ "loss": 0.8915,
+ "step": 1159
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8105527341936574e-05,
+ "loss": 0.9839,
+ "step": 1160
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.810187585157317e-05,
+ "loss": 0.3224,
+ "step": 1161
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8098221214534543e-05,
+ "loss": 0.307,
+ "step": 1162
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8094563432240107e-05,
+ "loss": 0.9391,
+ "step": 1163
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8090902506110513e-05,
+ "loss": 0.9469,
+ "step": 1164
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8087238437567614e-05,
+ "loss": 0.9498,
+ "step": 1165
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8083571228034498e-05,
+ "loss": 0.912,
+ "step": 1166
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.807990087893546e-05,
+ "loss": 0.8633,
+ "step": 1167
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.807622739169603e-05,
+ "loss": 0.952,
+ "step": 1168
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.807255076774294e-05,
+ "loss": 0.3537,
+ "step": 1169
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8068871008504153e-05,
+ "loss": 0.9312,
+ "step": 1170
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8065188115408844e-05,
+ "loss": 1.0083,
+ "step": 1171
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8061502089887406e-05,
+ "loss": 0.9158,
+ "step": 1172
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.805781293337145e-05,
+ "loss": 0.8719,
+ "step": 1173
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8054120647293798e-05,
+ "loss": 0.8885,
+ "step": 1174
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8050425233088496e-05,
+ "loss": 0.3342,
+ "step": 1175
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.80467266921908e-05,
+ "loss": 0.8902,
+ "step": 1176
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8043025026037178e-05,
+ "loss": 0.8875,
+ "step": 1177
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8039320236065314e-05,
+ "loss": 0.9133,
+ "step": 1178
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.803561232371411e-05,
+ "loss": 0.9502,
+ "step": 1179
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.803190129042367e-05,
+ "loss": 0.9052,
+ "step": 1180
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8028187137635325e-05,
+ "loss": 0.9564,
+ "step": 1181
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8024469866791602e-05,
+ "loss": 0.9611,
+ "step": 1182
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.802074947933625e-05,
+ "loss": 0.9002,
+ "step": 1183
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.801702597671422e-05,
+ "loss": 0.3337,
+ "step": 1184
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8013299360371685e-05,
+ "loss": 0.8692,
+ "step": 1185
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8009569631756013e-05,
+ "loss": 0.9799,
+ "step": 1186
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8005836792315793e-05,
+ "loss": 0.8318,
+ "step": 1187
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.800210084350081e-05,
+ "loss": 0.9676,
+ "step": 1188
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.799836178676207e-05,
+ "loss": 0.9258,
+ "step": 1189
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.799461962355178e-05,
+ "loss": 0.9595,
+ "step": 1190
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7990874355323345e-05,
+ "loss": 0.9038,
+ "step": 1191
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7987125983531393e-05,
+ "loss": 0.8838,
+ "step": 1192
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7983374509631742e-05,
+ "loss": 0.8658,
+ "step": 1193
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7979619935081424e-05,
+ "loss": 0.9302,
+ "step": 1194
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.797586226133867e-05,
+ "loss": 0.8896,
+ "step": 1195
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7972101489862924e-05,
+ "loss": 0.9137,
+ "step": 1196
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7968337622114824e-05,
+ "loss": 0.8774,
+ "step": 1197
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7964570659556206e-05,
+ "loss": 0.9242,
+ "step": 1198
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.796080060365012e-05,
+ "loss": 0.9346,
+ "step": 1199
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7957027455860815e-05,
+ "loss": 0.944,
+ "step": 1200
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.795325121765373e-05,
+ "loss": 0.8704,
+ "step": 1201
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.794947189049552e-05,
+ "loss": 0.3457,
+ "step": 1202
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7945689475854033e-05,
+ "loss": 0.9214,
+ "step": 1203
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7941903975198305e-05,
+ "loss": 0.8942,
+ "step": 1204
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7938115389998595e-05,
+ "loss": 0.931,
+ "step": 1205
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7934323721726334e-05,
+ "loss": 0.9481,
+ "step": 1206
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7930528971854166e-05,
+ "loss": 0.951,
+ "step": 1207
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.792673114185593e-05,
+ "loss": 0.9125,
+ "step": 1208
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7922930233206656e-05,
+ "loss": 0.937,
+ "step": 1209
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7919126247382576e-05,
+ "loss": 0.9068,
+ "step": 1210
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.791531918586112e-05,
+ "loss": 0.896,
+ "step": 1211
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7911509050120892e-05,
+ "loss": 0.97,
+ "step": 1212
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7907695841641716e-05,
+ "loss": 0.892,
+ "step": 1213
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7903879561904597e-05,
+ "loss": 0.9074,
+ "step": 1214
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.790006021239173e-05,
+ "loss": 0.9188,
+ "step": 1215
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.789623779458651e-05,
+ "loss": 0.3391,
+ "step": 1216
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.789241230997352e-05,
+ "loss": 1.015,
+ "step": 1217
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7888583760038534e-05,
+ "loss": 0.8729,
+ "step": 1218
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7884752146268513e-05,
+ "loss": 0.8719,
+ "step": 1219
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7880917470151614e-05,
+ "loss": 0.9325,
+ "step": 1220
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7877079733177185e-05,
+ "loss": 0.9018,
+ "step": 1221
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7873238936835754e-05,
+ "loss": 0.8804,
+ "step": 1222
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786939508261904e-05,
+ "loss": 0.9016,
+ "step": 1223
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786554817201996e-05,
+ "loss": 0.9105,
+ "step": 1224
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.78616982065326e-05,
+ "loss": 0.3369,
+ "step": 1225
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.785784518765225e-05,
+ "loss": 0.8996,
+ "step": 1226
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7853989116875373e-05,
+ "loss": 0.3376,
+ "step": 1227
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7850129995699626e-05,
+ "loss": 0.8681,
+ "step": 1228
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7846267825623843e-05,
+ "loss": 0.9937,
+ "step": 1229
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7842402608148053e-05,
+ "loss": 0.9595,
+ "step": 1230
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7838534344773453e-05,
+ "loss": 0.8766,
+ "step": 1231
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7834663037002444e-05,
+ "loss": 0.921,
+ "step": 1232
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7830788686338586e-05,
+ "loss": 0.9094,
+ "step": 1233
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7826911294286636e-05,
+ "loss": 0.8882,
+ "step": 1234
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.782303086235253e-05,
+ "loss": 0.9083,
+ "step": 1235
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.781914739204338e-05,
+ "loss": 0.7964,
+ "step": 1236
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7815260884867486e-05,
+ "loss": 0.9136,
+ "step": 1237
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.781137134233432e-05,
+ "loss": 0.9764,
+ "step": 1238
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7807478765954532e-05,
+ "loss": 0.9434,
+ "step": 1239
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7803583157239958e-05,
+ "loss": 0.9205,
+ "step": 1240
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7799684517703605e-05,
+ "loss": 0.8635,
+ "step": 1241
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.779578284885966e-05,
+ "loss": 0.3322,
+ "step": 1242
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.779187815222349e-05,
+ "loss": 0.8767,
+ "step": 1243
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.778797042931163e-05,
+ "loss": 0.902,
+ "step": 1244
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7784059681641798e-05,
+ "loss": 0.9385,
+ "step": 1245
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.778014591073288e-05,
+ "loss": 0.934,
+ "step": 1246
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.777622911810494e-05,
+ "loss": 0.8906,
+ "step": 1247
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.777230930527922e-05,
+ "loss": 0.8875,
+ "step": 1248
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7768386473778124e-05,
+ "loss": 0.9189,
+ "step": 1249
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7764460625125236e-05,
+ "loss": 0.8831,
+ "step": 1250
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.776053176084531e-05,
+ "loss": 0.9489,
+ "step": 1251
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7756599882464274e-05,
+ "loss": 0.9754,
+ "step": 1252
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7752664991509224e-05,
+ "loss": 0.8612,
+ "step": 1253
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7748727089508423e-05,
+ "loss": 0.8817,
+ "step": 1254
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7744786177991307e-05,
+ "loss": 0.3096,
+ "step": 1255
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.774084225848849e-05,
+ "loss": 0.8883,
+ "step": 1256
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.773689533253173e-05,
+ "loss": 0.8886,
+ "step": 1257
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7732945401653978e-05,
+ "loss": 0.8891,
+ "step": 1258
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7728992467389342e-05,
+ "loss": 0.9199,
+ "step": 1259
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7725036531273087e-05,
+ "loss": 0.9217,
+ "step": 1260
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7721077594841663e-05,
+ "loss": 0.9471,
+ "step": 1261
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.771711565963267e-05,
+ "loss": 0.8964,
+ "step": 1262
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7713150727184878e-05,
+ "loss": 0.9179,
+ "step": 1263
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.770918279903822e-05,
+ "loss": 0.8938,
+ "step": 1264
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.77052118767338e-05,
+ "loss": 0.8972,
+ "step": 1265
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7701237961813874e-05,
+ "loss": 0.9037,
+ "step": 1266
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7697261055821864e-05,
+ "loss": 0.9953,
+ "step": 1267
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7693281160302354e-05,
+ "loss": 0.916,
+ "step": 1268
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7689298276801095e-05,
+ "loss": 0.8463,
+ "step": 1269
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7685312406864986e-05,
+ "loss": 0.9613,
+ "step": 1270
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7681323552042094e-05,
+ "loss": 0.34,
+ "step": 1271
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.767733171388165e-05,
+ "loss": 0.9194,
+ "step": 1272
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7673336893934033e-05,
+ "loss": 0.8789,
+ "step": 1273
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7669339093750786e-05,
+ "loss": 0.872,
+ "step": 1274
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.766533831488461e-05,
+ "loss": 0.9512,
+ "step": 1275
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7661334558889357e-05,
+ "loss": 0.8791,
+ "step": 1276
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7657327827320046e-05,
+ "loss": 0.8505,
+ "step": 1277
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.765331812173284e-05,
+ "loss": 0.9258,
+ "step": 1278
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7649305443685068e-05,
+ "loss": 0.8811,
+ "step": 1279
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.76452897947352e-05,
+ "loss": 0.8763,
+ "step": 1280
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7641271176442876e-05,
+ "loss": 0.8905,
+ "step": 1281
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7637249590368878e-05,
+ "loss": 0.9268,
+ "step": 1282
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.763322503807514e-05,
+ "loss": 0.9025,
+ "step": 1283
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7629197521124758e-05,
+ "loss": 0.8868,
+ "step": 1284
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7625167041081967e-05,
+ "loss": 0.9169,
+ "step": 1285
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7621133599512163e-05,
+ "loss": 0.8898,
+ "step": 1286
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.761709719798189e-05,
+ "loss": 0.9608,
+ "step": 1287
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.761305783805883e-05,
+ "loss": 0.9333,
+ "step": 1288
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7609015521311836e-05,
+ "loss": 0.3426,
+ "step": 1289
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7604970249310893e-05,
+ "loss": 0.8983,
+ "step": 1290
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7600922023627137e-05,
+ "loss": 0.8992,
+ "step": 1291
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.759687084583285e-05,
+ "loss": 0.927,
+ "step": 1292
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.759281671750147e-05,
+ "loss": 0.9638,
+ "step": 1293
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7588759640207564e-05,
+ "loss": 0.3531,
+ "step": 1294
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7584699615526857e-05,
+ "loss": 0.9061,
+ "step": 1295
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7580636645036224e-05,
+ "loss": 0.9489,
+ "step": 1296
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.757657073031367e-05,
+ "loss": 0.985,
+ "step": 1297
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7572501872938343e-05,
+ "loss": 0.3491,
+ "step": 1298
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.756843007449055e-05,
+ "loss": 0.9456,
+ "step": 1299
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7564355336551727e-05,
+ "loss": 0.8545,
+ "step": 1300
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7560277660704455e-05,
+ "loss": 0.3479,
+ "step": 1301
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.755619704853246e-05,
+ "loss": 0.8946,
+ "step": 1302
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7552113501620595e-05,
+ "loss": 0.8977,
+ "step": 1303
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7548027021554874e-05,
+ "loss": 0.9164,
+ "step": 1304
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.754393760992243e-05,
+ "loss": 0.9089,
+ "step": 1305
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7539845268311548e-05,
+ "loss": 0.9209,
+ "step": 1306
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7535749998311645e-05,
+ "loss": 0.9072,
+ "step": 1307
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.753165180151328e-05,
+ "loss": 0.3462,
+ "step": 1308
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.752755067950814e-05,
+ "loss": 0.9508,
+ "step": 1309
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.752344663388906e-05,
+ "loss": 0.9197,
+ "step": 1310
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7519339666249997e-05,
+ "loss": 0.8936,
+ "step": 1311
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7515229778186052e-05,
+ "loss": 0.897,
+ "step": 1312
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7511116971293463e-05,
+ "loss": 0.8872,
+ "step": 1313
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7507001247169587e-05,
+ "loss": 0.9111,
+ "step": 1314
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7502882607412933e-05,
+ "loss": 0.9244,
+ "step": 1315
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.749876105362313e-05,
+ "loss": 0.9142,
+ "step": 1316
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7494636587400942e-05,
+ "loss": 0.9361,
+ "step": 1317
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.749050921034826e-05,
+ "loss": 0.9259,
+ "step": 1318
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7486378924068123e-05,
+ "loss": 0.9613,
+ "step": 1319
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.748224573016467e-05,
+ "loss": 0.9206,
+ "step": 1320
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7478109630243195e-05,
+ "loss": 0.8995,
+ "step": 1321
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.747397062591011e-05,
+ "loss": 0.8912,
+ "step": 1322
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.746982871877296e-05,
+ "loss": 0.9132,
+ "step": 1323
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7465683910440405e-05,
+ "loss": 0.3367,
+ "step": 1324
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7461536202522248e-05,
+ "loss": 0.8463,
+ "step": 1325
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.745738559662941e-05,
+ "loss": 0.8797,
+ "step": 1326
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7453232094373936e-05,
+ "loss": 0.9016,
+ "step": 1327
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7449075697369005e-05,
+ "loss": 0.9495,
+ "step": 1328
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7444916407228904e-05,
+ "loss": 0.8987,
+ "step": 1329
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.744075422556906e-05,
+ "loss": 0.8992,
+ "step": 1330
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7436589154006014e-05,
+ "loss": 0.991,
+ "step": 1331
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.743242119415743e-05,
+ "loss": 0.8775,
+ "step": 1332
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7428250347642102e-05,
+ "loss": 0.8728,
+ "step": 1333
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7424076616079933e-05,
+ "loss": 0.9342,
+ "step": 1334
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7419900001091953e-05,
+ "loss": 0.9245,
+ "step": 1335
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7415720504300314e-05,
+ "loss": 0.9226,
+ "step": 1336
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.741153812732828e-05,
+ "loss": 0.9372,
+ "step": 1337
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7407352871800246e-05,
+ "loss": 0.8907,
+ "step": 1338
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7403164739341708e-05,
+ "loss": 0.911,
+ "step": 1339
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.739897373157929e-05,
+ "loss": 0.8968,
+ "step": 1340
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7394779850140736e-05,
+ "loss": 0.938,
+ "step": 1341
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7390583096654895e-05,
+ "loss": 0.8963,
+ "step": 1342
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7386383472751745e-05,
+ "loss": 0.9017,
+ "step": 1343
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7382180980062365e-05,
+ "loss": 0.9331,
+ "step": 1344
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7377975620218954e-05,
+ "loss": 0.9107,
+ "step": 1345
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7373767394854836e-05,
+ "loss": 0.9292,
+ "step": 1346
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7369556305604422e-05,
+ "loss": 0.9282,
+ "step": 1347
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.736534235410326e-05,
+ "loss": 0.8801,
+ "step": 1348
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7361125541988e-05,
+ "loss": 0.8528,
+ "step": 1349
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7356905870896407e-05,
+ "loss": 0.9513,
+ "step": 1350
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.735268334246734e-05,
+ "loss": 0.878,
+ "step": 1351
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7348457958340792e-05,
+ "loss": 0.3268,
+ "step": 1352
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7344229720157846e-05,
+ "loss": 0.8879,
+ "step": 1353
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7339998629560705e-05,
+ "loss": 0.9062,
+ "step": 1354
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7335764688192676e-05,
+ "loss": 0.9827,
+ "step": 1355
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.733152789769817e-05,
+ "loss": 1.0078,
+ "step": 1356
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7327288259722714e-05,
+ "loss": 0.9629,
+ "step": 1357
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7323045775912927e-05,
+ "loss": 0.9111,
+ "step": 1358
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7318800447916543e-05,
+ "loss": 0.94,
+ "step": 1359
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7314552277382403e-05,
+ "loss": 0.9217,
+ "step": 1360
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7310301265960446e-05,
+ "loss": 0.8825,
+ "step": 1361
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7306047415301706e-05,
+ "loss": 0.8768,
+ "step": 1362
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7301790727058344e-05,
+ "loss": 0.8997,
+ "step": 1363
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7297531202883598e-05,
+ "loss": 0.9012,
+ "step": 1364
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7293268844431826e-05,
+ "loss": 0.8939,
+ "step": 1365
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7289003653358472e-05,
+ "loss": 0.9494,
+ "step": 1366
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7284735631320093e-05,
+ "loss": 0.8624,
+ "step": 1367
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7280464779974335e-05,
+ "loss": 0.9329,
+ "step": 1368
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7276191100979952e-05,
+ "loss": 0.8958,
+ "step": 1369
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7271914595996784e-05,
+ "loss": 0.8791,
+ "step": 1370
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7267635266685782e-05,
+ "loss": 0.9476,
+ "step": 1371
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7263353114708993e-05,
+ "loss": 0.963,
+ "step": 1372
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7259068141729542e-05,
+ "loss": 0.8908,
+ "step": 1373
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7254780349411677e-05,
+ "loss": 0.3562,
+ "step": 1374
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7250489739420718e-05,
+ "loss": 0.969,
+ "step": 1375
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7246196313423095e-05,
+ "loss": 0.3459,
+ "step": 1376
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7241900073086318e-05,
+ "loss": 0.9044,
+ "step": 1377
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7237601020079003e-05,
+ "loss": 0.8814,
+ "step": 1378
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7233299156070852e-05,
+ "loss": 0.9551,
+ "step": 1379
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7228994482732653e-05,
+ "loss": 0.9468,
+ "step": 1380
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.72246870017363e-05,
+ "loss": 0.9222,
+ "step": 1381
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7220376714754766e-05,
+ "loss": 0.8468,
+ "step": 1382
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7216063623462112e-05,
+ "loss": 0.8935,
+ "step": 1383
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7211747729533504e-05,
+ "loss": 0.9338,
+ "step": 1384
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7207429034645176e-05,
+ "loss": 0.8834,
+ "step": 1385
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.720310754047446e-05,
+ "loss": 0.9381,
+ "step": 1386
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.719878324869978e-05,
+ "loss": 0.896,
+ "step": 1387
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7194456161000634e-05,
+ "loss": 0.9337,
+ "step": 1388
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.719012627905762e-05,
+ "loss": 0.8796,
+ "step": 1389
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.718579360455241e-05,
+ "loss": 0.3521,
+ "step": 1390
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7181458139167767e-05,
+ "loss": 0.8592,
+ "step": 1391
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7177119884587536e-05,
+ "loss": 0.8868,
+ "step": 1392
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.717277884249664e-05,
+ "loss": 0.9154,
+ "step": 1393
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.716843501458109e-05,
+ "loss": 0.3666,
+ "step": 1394
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.716408840252799e-05,
+ "loss": 0.895,
+ "step": 1395
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7159739008025503e-05,
+ "loss": 0.9729,
+ "step": 1396
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7155386832762892e-05,
+ "loss": 0.3205,
+ "step": 1397
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.715103187843048e-05,
+ "loss": 0.945,
+ "step": 1398
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7146674146719688e-05,
+ "loss": 0.9108,
+ "step": 1399
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7142313639323012e-05,
+ "loss": 0.8483,
+ "step": 1400
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7137950357934017e-05,
+ "loss": 0.9048,
+ "step": 1401
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7133584304247354e-05,
+ "loss": 0.8964,
+ "step": 1402
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7129215479958747e-05,
+ "loss": 0.8927,
+ "step": 1403
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7124843886765e-05,
+ "loss": 0.898,
+ "step": 1404
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.712046952636398e-05,
+ "loss": 0.8874,
+ "step": 1405
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7116092400454655e-05,
+ "loss": 0.8855,
+ "step": 1406
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7111712510737035e-05,
+ "loss": 0.8747,
+ "step": 1407
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7107329858912226e-05,
+ "loss": 0.8969,
+ "step": 1408
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7102944446682393e-05,
+ "loss": 0.9312,
+ "step": 1409
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.709855627575079e-05,
+ "loss": 0.9206,
+ "step": 1410
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7094165347821724e-05,
+ "loss": 0.9285,
+ "step": 1411
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7089771664600584e-05,
+ "loss": 0.872,
+ "step": 1412
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.708537522779382e-05,
+ "loss": 0.9077,
+ "step": 1413
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7080976039108964e-05,
+ "loss": 0.3426,
+ "step": 1414
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7076574100254614e-05,
+ "loss": 0.9247,
+ "step": 1415
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.707216941294042e-05,
+ "loss": 0.8963,
+ "step": 1416
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.706776197887712e-05,
+ "loss": 0.8499,
+ "step": 1417
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7063351799776514e-05,
+ "loss": 0.9228,
+ "step": 1418
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7058938877351456e-05,
+ "loss": 0.887,
+ "step": 1419
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.705452321331588e-05,
+ "loss": 0.8649,
+ "step": 1420
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7050104809384774e-05,
+ "loss": 0.302,
+ "step": 1421
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.70456836672742e-05,
+ "loss": 0.876,
+ "step": 1422
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.704125978870128e-05,
+ "loss": 0.8863,
+ "step": 1423
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7036833175384192e-05,
+ "loss": 0.8995,
+ "step": 1424
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7032403829042182e-05,
+ "loss": 0.3592,
+ "step": 1425
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7027971751395563e-05,
+ "loss": 0.3541,
+ "step": 1426
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7023536944165697e-05,
+ "loss": 0.9257,
+ "step": 1427
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7019099409075014e-05,
+ "loss": 0.8985,
+ "step": 1428
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7014659147847005e-05,
+ "loss": 0.9004,
+ "step": 1429
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.701021616220621e-05,
+ "loss": 0.9354,
+ "step": 1430
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7005770453878234e-05,
+ "loss": 0.978,
+ "step": 1431
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7001322024589742e-05,
+ "loss": 0.9114,
+ "step": 1432
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6996870876068455e-05,
+ "loss": 0.9199,
+ "step": 1433
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6992417010043144e-05,
+ "loss": 0.8406,
+ "step": 1434
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6987960428243637e-05,
+ "loss": 0.8679,
+ "step": 1435
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6983501132400825e-05,
+ "loss": 0.9248,
+ "step": 1436
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6979039124246643e-05,
+ "loss": 1.0086,
+ "step": 1437
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6974574405514083e-05,
+ "loss": 0.3541,
+ "step": 1438
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6970106977937192e-05,
+ "loss": 0.9326,
+ "step": 1439
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.696563684325107e-05,
+ "loss": 0.3749,
+ "step": 1440
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6961164003191862e-05,
+ "loss": 0.9212,
+ "step": 1441
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6956688459496767e-05,
+ "loss": 0.9714,
+ "step": 1442
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.695221021390404e-05,
+ "loss": 0.8775,
+ "step": 1443
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6947729268152972e-05,
+ "loss": 0.9413,
+ "step": 1444
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6943245623983918e-05,
+ "loss": 0.97,
+ "step": 1445
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6938759283138268e-05,
+ "loss": 0.8966,
+ "step": 1446
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.693427024735847e-05,
+ "loss": 0.9482,
+ "step": 1447
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.692977851838801e-05,
+ "loss": 0.9045,
+ "step": 1448
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6925284097971427e-05,
+ "loss": 0.9114,
+ "step": 1449
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6920786987854296e-05,
+ "loss": 0.9462,
+ "step": 1450
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.691628718978325e-05,
+ "loss": 0.9369,
+ "step": 1451
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.691178470550596e-05,
+ "loss": 0.9344,
+ "step": 1452
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6907279536771127e-05,
+ "loss": 0.9304,
+ "step": 1453
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6902771685328524e-05,
+ "loss": 0.8722,
+ "step": 1454
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6898261152928933e-05,
+ "loss": 0.9627,
+ "step": 1455
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6893747941324197e-05,
+ "loss": 0.9426,
+ "step": 1456
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6889232052267203e-05,
+ "loss": 0.883,
+ "step": 1457
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.688471348751186e-05,
+ "loss": 0.8001,
+ "step": 1458
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.688019224881313e-05,
+ "loss": 0.9631,
+ "step": 1459
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6875668337927014e-05,
+ "loss": 0.8921,
+ "step": 1460
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6871141756610544e-05,
+ "loss": 0.8152,
+ "step": 1461
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6866612506621788e-05,
+ "loss": 0.8897,
+ "step": 1462
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6862080589719863e-05,
+ "loss": 0.8766,
+ "step": 1463
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6857546007664908e-05,
+ "loss": 0.9122,
+ "step": 1464
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6853008762218103e-05,
+ "loss": 0.9693,
+ "step": 1465
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.684846885514166e-05,
+ "loss": 0.9323,
+ "step": 1466
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6843926288198828e-05,
+ "loss": 0.3472,
+ "step": 1467
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.683938106315389e-05,
+ "loss": 0.8746,
+ "step": 1468
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.683483318177216e-05,
+ "loss": 0.3501,
+ "step": 1469
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6830282645819974e-05,
+ "loss": 0.9011,
+ "step": 1470
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6825729457064718e-05,
+ "loss": 0.9537,
+ "step": 1471
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6821173617274793e-05,
+ "loss": 0.8521,
+ "step": 1472
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6816615128219635e-05,
+ "loss": 0.8628,
+ "step": 1473
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.681205399166971e-05,
+ "loss": 0.876,
+ "step": 1474
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6807490209396506e-05,
+ "loss": 0.8959,
+ "step": 1475
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6802923783172553e-05,
+ "loss": 0.9094,
+ "step": 1476
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.679835471477139e-05,
+ "loss": 0.8905,
+ "step": 1477
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6793783005967593e-05,
+ "loss": 0.9005,
+ "step": 1478
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.678920865853676e-05,
+ "loss": 0.3728,
+ "step": 1479
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.678463167425552e-05,
+ "loss": 0.9065,
+ "step": 1480
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6780052054901512e-05,
+ "loss": 0.8991,
+ "step": 1481
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6775469802253416e-05,
+ "loss": 0.8868,
+ "step": 1482
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6770884918090923e-05,
+ "loss": 0.8659,
+ "step": 1483
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6766297404194745e-05,
+ "loss": 0.9002,
+ "step": 1484
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6761707262346624e-05,
+ "loss": 0.8773,
+ "step": 1485
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.675711449432932e-05,
+ "loss": 0.9197,
+ "step": 1486
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6752519101926606e-05,
+ "loss": 0.8743,
+ "step": 1487
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6747921086923284e-05,
+ "loss": 0.8821,
+ "step": 1488
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.674332045110517e-05,
+ "loss": 0.9071,
+ "step": 1489
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6738717196259092e-05,
+ "loss": 0.9039,
+ "step": 1490
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.673411132417291e-05,
+ "loss": 0.9037,
+ "step": 1491
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.672950283663548e-05,
+ "loss": 0.9522,
+ "step": 1492
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6724891735436697e-05,
+ "loss": 0.9196,
+ "step": 1493
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6720278022367453e-05,
+ "loss": 0.8583,
+ "step": 1494
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6715661699219664e-05,
+ "loss": 0.9125,
+ "step": 1495
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6711042767786257e-05,
+ "loss": 0.8885,
+ "step": 1496
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6706421229861168e-05,
+ "loss": 0.8732,
+ "step": 1497
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6701797087239354e-05,
+ "loss": 0.351,
+ "step": 1498
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6697170341716772e-05,
+ "loss": 0.8356,
+ "step": 1499
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6692540995090403e-05,
+ "loss": 0.9195,
+ "step": 1500
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.668790904915823e-05,
+ "loss": 0.8917,
+ "step": 1501
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6683274505719248e-05,
+ "loss": 0.8966,
+ "step": 1502
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6678637366573455e-05,
+ "loss": 0.9046,
+ "step": 1503
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.667399763352187e-05,
+ "loss": 0.9207,
+ "step": 1504
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.666935530836651e-05,
+ "loss": 0.9575,
+ "step": 1505
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6664710392910396e-05,
+ "loss": 0.815,
+ "step": 1506
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6660062888957564e-05,
+ "loss": 0.9449,
+ "step": 1507
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.665541279831305e-05,
+ "loss": 0.8779,
+ "step": 1508
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6650760122782898e-05,
+ "loss": 0.8118,
+ "step": 1509
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6646104864174147e-05,
+ "loss": 0.9139,
+ "step": 1510
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.664144702429485e-05,
+ "loss": 0.9026,
+ "step": 1511
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.663678660495406e-05,
+ "loss": 0.8231,
+ "step": 1512
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.663212360796183e-05,
+ "loss": 0.926,
+ "step": 1513
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.662745803512921e-05,
+ "loss": 0.9112,
+ "step": 1514
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.662278988826826e-05,
+ "loss": 0.9311,
+ "step": 1515
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6618119169192027e-05,
+ "loss": 0.9262,
+ "step": 1516
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.661344587971457e-05,
+ "loss": 0.985,
+ "step": 1517
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6608770021650945e-05,
+ "loss": 0.9365,
+ "step": 1518
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6604091596817193e-05,
+ "loss": 0.904,
+ "step": 1519
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6599410607030363e-05,
+ "loss": 0.9535,
+ "step": 1520
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6594727054108498e-05,
+ "loss": 0.9517,
+ "step": 1521
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.659004093987064e-05,
+ "loss": 0.9498,
+ "step": 1522
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6585352266136814e-05,
+ "loss": 0.9883,
+ "step": 1523
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6580661034728055e-05,
+ "loss": 0.8979,
+ "step": 1524
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6575967247466376e-05,
+ "loss": 0.8531,
+ "step": 1525
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.657127090617479e-05,
+ "loss": 0.8912,
+ "step": 1526
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.656657201267731e-05,
+ "loss": 0.9086,
+ "step": 1527
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6561870568798927e-05,
+ "loss": 0.9344,
+ "step": 1528
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.655716657636562e-05,
+ "loss": 0.8746,
+ "step": 1529
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6552460037204382e-05,
+ "loss": 0.9204,
+ "step": 1530
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6547750953143168e-05,
+ "loss": 0.8635,
+ "step": 1531
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.654303932601093e-05,
+ "loss": 0.921,
+ "step": 1532
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6538325157637614e-05,
+ "loss": 0.9079,
+ "step": 1533
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.653360844985415e-05,
+ "loss": 0.3378,
+ "step": 1534
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.652888920449245e-05,
+ "loss": 0.9322,
+ "step": 1535
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6524167423385414e-05,
+ "loss": 0.9674,
+ "step": 1536
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.651944310836693e-05,
+ "loss": 0.9316,
+ "step": 1537
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6514716261271866e-05,
+ "loss": 0.8693,
+ "step": 1538
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6509986883936073e-05,
+ "loss": 0.8552,
+ "step": 1539
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.650525497819639e-05,
+ "loss": 0.3403,
+ "step": 1540
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6500520545890634e-05,
+ "loss": 0.9273,
+ "step": 1541
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6495783588857605e-05,
+ "loss": 0.9329,
+ "step": 1542
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.649104410893708e-05,
+ "loss": 0.334,
+ "step": 1543
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.648630210796982e-05,
+ "loss": 0.8976,
+ "step": 1544
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6481557587797562e-05,
+ "loss": 0.9389,
+ "step": 1545
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6476810550263023e-05,
+ "loss": 0.9542,
+ "step": 1546
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6472060997209898e-05,
+ "loss": 0.8812,
+ "step": 1547
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6467308930482863e-05,
+ "loss": 0.9048,
+ "step": 1548
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6462554351927558e-05,
+ "loss": 0.876,
+ "step": 1549
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6457797263390613e-05,
+ "loss": 0.8298,
+ "step": 1550
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6453037666719624e-05,
+ "loss": 0.9022,
+ "step": 1551
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6448275563763162e-05,
+ "loss": 0.8829,
+ "step": 1552
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.644351095637078e-05,
+ "loss": 0.8775,
+ "step": 1553
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6438743846392987e-05,
+ "loss": 0.9378,
+ "step": 1554
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6433974235681274e-05,
+ "loss": 0.9564,
+ "step": 1555
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6429202126088112e-05,
+ "loss": 0.8495,
+ "step": 1556
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6424427519466925e-05,
+ "loss": 0.9205,
+ "step": 1557
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.641965041767212e-05,
+ "loss": 0.8454,
+ "step": 1558
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6414870822559064e-05,
+ "loss": 0.8951,
+ "step": 1559
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6410088735984103e-05,
+ "loss": 0.9077,
+ "step": 1560
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6405304159804534e-05,
+ "loss": 0.931,
+ "step": 1561
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6400517095878644e-05,
+ "loss": 0.9085,
+ "step": 1562
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6395727546065665e-05,
+ "loss": 0.9335,
+ "step": 1563
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6390935512225806e-05,
+ "loss": 0.9383,
+ "step": 1564
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6386140996220232e-05,
+ "loss": 0.9486,
+ "step": 1565
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6381343999911088e-05,
+ "loss": 0.9645,
+ "step": 1566
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6376544525161463e-05,
+ "loss": 0.942,
+ "step": 1567
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6371742573835426e-05,
+ "loss": 0.8543,
+ "step": 1568
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.636693814779799e-05,
+ "loss": 0.9479,
+ "step": 1569
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6362131248915145e-05,
+ "loss": 0.8565,
+ "step": 1570
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6357321879053833e-05,
+ "loss": 0.9377,
+ "step": 1571
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6352510040081962e-05,
+ "loss": 0.9424,
+ "step": 1572
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.634769573386839e-05,
+ "loss": 0.8644,
+ "step": 1573
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.634287896228294e-05,
+ "loss": 0.8406,
+ "step": 1574
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6338059727196386e-05,
+ "loss": 0.9085,
+ "step": 1575
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6333238030480473e-05,
+ "loss": 0.9058,
+ "step": 1576
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6328413874007884e-05,
+ "loss": 0.942,
+ "step": 1577
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6323587259652267e-05,
+ "loss": 0.8775,
+ "step": 1578
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6318758189288227e-05,
+ "loss": 0.9459,
+ "step": 1579
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6313926664791316e-05,
+ "loss": 0.9458,
+ "step": 1580
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6309092688038047e-05,
+ "loss": 0.9369,
+ "step": 1581
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6304256260905872e-05,
+ "loss": 0.917,
+ "step": 1582
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6299417385273216e-05,
+ "loss": 0.9081,
+ "step": 1583
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.629457606301943e-05,
+ "loss": 0.3765,
+ "step": 1584
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6289732296024837e-05,
+ "loss": 0.9892,
+ "step": 1585
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6284886086170697e-05,
+ "loss": 0.9082,
+ "step": 1586
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.628003743533922e-05,
+ "loss": 0.3439,
+ "step": 1587
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6275186345413566e-05,
+ "loss": 0.9259,
+ "step": 1588
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.627033281827785e-05,
+ "loss": 0.362,
+ "step": 1589
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6265476855817116e-05,
+ "loss": 0.8515,
+ "step": 1590
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6260618459917366e-05,
+ "loss": 0.9138,
+ "step": 1591
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6255757632465553e-05,
+ "loss": 0.9615,
+ "step": 1592
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.625089437534956e-05,
+ "loss": 0.9091,
+ "step": 1593
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.624602869045822e-05,
+ "loss": 0.9077,
+ "step": 1594
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.624116057968131e-05,
+ "loss": 0.9218,
+ "step": 1595
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6236290044909543e-05,
+ "loss": 1.0128,
+ "step": 1596
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6231417088034585e-05,
+ "loss": 0.9007,
+ "step": 1597
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.622654171094904e-05,
+ "loss": 0.3385,
+ "step": 1598
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6221663915546437e-05,
+ "loss": 0.9356,
+ "step": 1599
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6216783703721265e-05,
+ "loss": 0.9317,
+ "step": 1600
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6211901077368937e-05,
+ "loss": 0.8909,
+ "step": 1601
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.620701603838581e-05,
+ "loss": 0.9236,
+ "step": 1602
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6202128588669177e-05,
+ "loss": 0.8958,
+ "step": 1603
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.619723873011727e-05,
+ "loss": 0.866,
+ "step": 1604
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6192346464629247e-05,
+ "loss": 0.8925,
+ "step": 1605
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6187451794105212e-05,
+ "loss": 0.851,
+ "step": 1606
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.61825547204462e-05,
+ "loss": 0.879,
+ "step": 1607
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6177655245554177e-05,
+ "loss": 0.8873,
+ "step": 1608
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.617275337133204e-05,
+ "loss": 0.8709,
+ "step": 1609
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6167849099683623e-05,
+ "loss": 0.8851,
+ "step": 1610
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6162942432513687e-05,
+ "loss": 0.9548,
+ "step": 1611
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6158033371727924e-05,
+ "loss": 0.9119,
+ "step": 1612
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6153121919232962e-05,
+ "loss": 0.8921,
+ "step": 1613
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.614820807693635e-05,
+ "loss": 0.9396,
+ "step": 1614
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6143291846746563e-05,
+ "loss": 0.9238,
+ "step": 1615
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.613837323057301e-05,
+ "loss": 0.993,
+ "step": 1616
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6133452230326035e-05,
+ "loss": 0.8919,
+ "step": 1617
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6128528847916883e-05,
+ "loss": 0.8905,
+ "step": 1618
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6123603085257746e-05,
+ "loss": 0.9203,
+ "step": 1619
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6118674944261732e-05,
+ "loss": 0.8348,
+ "step": 1620
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6113744426842882e-05,
+ "loss": 0.8634,
+ "step": 1621
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6108811534916137e-05,
+ "loss": 0.9083,
+ "step": 1622
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6103876270397387e-05,
+ "loss": 0.9459,
+ "step": 1623
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.609893863520343e-05,
+ "loss": 0.8999,
+ "step": 1624
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.609399863125198e-05,
+ "loss": 0.9203,
+ "step": 1625
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6089056260461687e-05,
+ "loss": 0.8976,
+ "step": 1626
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6084111524752107e-05,
+ "loss": 0.888,
+ "step": 1627
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.607916442604372e-05,
+ "loss": 0.9126,
+ "step": 1628
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6074214966257914e-05,
+ "loss": 0.9084,
+ "step": 1629
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6069263147317015e-05,
+ "loss": 0.856,
+ "step": 1630
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6064308971144236e-05,
+ "loss": 0.9364,
+ "step": 1631
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.605935243966374e-05,
+ "loss": 0.8824,
+ "step": 1632
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6054393554800574e-05,
+ "loss": 0.8943,
+ "step": 1633
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.604943231848072e-05,
+ "loss": 0.9126,
+ "step": 1634
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.604446873263106e-05,
+ "loss": 0.9869,
+ "step": 1635
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6039502799179394e-05,
+ "loss": 0.9221,
+ "step": 1636
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6034534520054435e-05,
+ "loss": 0.9395,
+ "step": 1637
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.60295638971858e-05,
+ "loss": 0.9287,
+ "step": 1638
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.602459093250403e-05,
+ "loss": 0.946,
+ "step": 1639
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.601961562794056e-05,
+ "loss": 0.8648,
+ "step": 1640
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.601463798542775e-05,
+ "loss": 0.3325,
+ "step": 1641
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6009658006898848e-05,
+ "loss": 0.8383,
+ "step": 1642
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.600467569428803e-05,
+ "loss": 0.85,
+ "step": 1643
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.599969104953036e-05,
+ "loss": 0.9851,
+ "step": 1644
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.599470407456182e-05,
+ "loss": 0.8856,
+ "step": 1645
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5989714771319297e-05,
+ "loss": 0.8561,
+ "step": 1646
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5984723141740578e-05,
+ "loss": 0.91,
+ "step": 1647
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.597972918776435e-05,
+ "loss": 0.9533,
+ "step": 1648
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5974732911330208e-05,
+ "loss": 0.9079,
+ "step": 1649
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5969734314378654e-05,
+ "loss": 0.8686,
+ "step": 1650
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5964733398851078e-05,
+ "loss": 0.9388,
+ "step": 1651
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5959730166689783e-05,
+ "loss": 0.9022,
+ "step": 1652
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5954724619837966e-05,
+ "loss": 0.869,
+ "step": 1653
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5949716760239722e-05,
+ "loss": 0.9018,
+ "step": 1654
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5944706589840046e-05,
+ "loss": 0.8919,
+ "step": 1655
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5939694110584833e-05,
+ "loss": 0.9298,
+ "step": 1656
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.593467932442087e-05,
+ "loss": 0.8993,
+ "step": 1657
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5929662233295846e-05,
+ "loss": 0.8597,
+ "step": 1658
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5924642839158334e-05,
+ "loss": 0.9543,
+ "step": 1659
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.591962114395781e-05,
+ "loss": 0.8902,
+ "step": 1660
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5914597149644654e-05,
+ "loss": 0.9517,
+ "step": 1661
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5909570858170115e-05,
+ "loss": 0.8964,
+ "step": 1662
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5904542271486346e-05,
+ "loss": 0.8226,
+ "step": 1663
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5899511391546403e-05,
+ "loss": 0.9308,
+ "step": 1664
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5894478220304215e-05,
+ "loss": 0.8725,
+ "step": 1665
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5889442759714603e-05,
+ "loss": 0.357,
+ "step": 1666
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5884405011733294e-05,
+ "loss": 0.8884,
+ "step": 1667
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.587936497831688e-05,
+ "loss": 0.8428,
+ "step": 1668
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5874322661422856e-05,
+ "loss": 0.8942,
+ "step": 1669
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5869278063009602e-05,
+ "loss": 0.9476,
+ "step": 1670
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.586423118503638e-05,
+ "loss": 0.8257,
+ "step": 1671
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.585918202946334e-05,
+ "loss": 0.9185,
+ "step": 1672
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5854130598251514e-05,
+ "loss": 0.9394,
+ "step": 1673
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5849076893362822e-05,
+ "loss": 0.9399,
+ "step": 1674
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.584402091676006e-05,
+ "loss": 0.9126,
+ "step": 1675
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5838962670406918e-05,
+ "loss": 0.9149,
+ "step": 1676
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5833902156267956e-05,
+ "loss": 0.938,
+ "step": 1677
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.582883937630862e-05,
+ "loss": 0.9592,
+ "step": 1678
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5823774332495236e-05,
+ "loss": 0.9069,
+ "step": 1679
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581870702679501e-05,
+ "loss": 0.918,
+ "step": 1680
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581363746117602e-05,
+ "loss": 0.8892,
+ "step": 1681
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.580856563760724e-05,
+ "loss": 0.349,
+ "step": 1682
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5803491558058486e-05,
+ "loss": 0.9282,
+ "step": 1683
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.579841522450049e-05,
+ "loss": 0.8366,
+ "step": 1684
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5793336638904838e-05,
+ "loss": 0.9711,
+ "step": 1685
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.578825580324399e-05,
+ "loss": 0.9039,
+ "step": 1686
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5783172719491288e-05,
+ "loss": 0.8891,
+ "step": 1687
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.577808738962094e-05,
+ "loss": 0.843,
+ "step": 1688
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.577299981560803e-05,
+ "loss": 0.3497,
+ "step": 1689
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5767909999428513e-05,
+ "loss": 0.9345,
+ "step": 1690
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.576281794305922e-05,
+ "loss": 0.93,
+ "step": 1691
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.575772364847784e-05,
+ "loss": 0.9059,
+ "step": 1692
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.575262711766294e-05,
+ "loss": 0.8892,
+ "step": 1693
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5747528352593956e-05,
+ "loss": 0.8448,
+ "step": 1694
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.574242735525119e-05,
+ "loss": 0.8748,
+ "step": 1695
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5737324127615808e-05,
+ "loss": 0.9469,
+ "step": 1696
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5732218671669847e-05,
+ "loss": 0.9469,
+ "step": 1697
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5727110989396205e-05,
+ "loss": 0.8397,
+ "step": 1698
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5722001082778645e-05,
+ "loss": 0.982,
+ "step": 1699
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5716888953801805e-05,
+ "loss": 0.9181,
+ "step": 1700
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5711774604451168e-05,
+ "loss": 0.85,
+ "step": 1701
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5706658036713093e-05,
+ "loss": 0.3376,
+ "step": 1702
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5701539252574795e-05,
+ "loss": 0.362,
+ "step": 1703
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5696418254024344e-05,
+ "loss": 0.8803,
+ "step": 1704
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.569129504305069e-05,
+ "loss": 0.904,
+ "step": 1705
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.568616962164362e-05,
+ "loss": 0.8772,
+ "step": 1706
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5681041991793788e-05,
+ "loss": 0.8893,
+ "step": 1707
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.567591215549271e-05,
+ "loss": 0.9031,
+ "step": 1708
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.567078011473276e-05,
+ "loss": 0.868,
+ "step": 1709
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5665645871507152e-05,
+ "loss": 0.841,
+ "step": 1710
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5660509427809973e-05,
+ "loss": 0.8552,
+ "step": 1711
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.565537078563616e-05,
+ "loss": 0.8433,
+ "step": 1712
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.56502299469815e-05,
+ "loss": 0.8928,
+ "step": 1713
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.564508691384264e-05,
+ "loss": 0.9244,
+ "step": 1714
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5639941688217063e-05,
+ "loss": 0.8972,
+ "step": 1715
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5634794272103126e-05,
+ "loss": 0.8691,
+ "step": 1716
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.562964466750003e-05,
+ "loss": 0.898,
+ "step": 1717
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.562449287640781e-05,
+ "loss": 0.9465,
+ "step": 1718
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5619338900827368e-05,
+ "loss": 0.8966,
+ "step": 1719
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5614182742760448e-05,
+ "loss": 0.9137,
+ "step": 1720
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5609024404209643e-05,
+ "loss": 0.9303,
+ "step": 1721
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5603863887178393e-05,
+ "loss": 0.8798,
+ "step": 1722
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5598701193670983e-05,
+ "loss": 0.9252,
+ "step": 1723
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.559353632569254e-05,
+ "loss": 0.8918,
+ "step": 1724
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5588369285249048e-05,
+ "loss": 0.8562,
+ "step": 1725
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5583200074347318e-05,
+ "loss": 0.9207,
+ "step": 1726
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.557802869499501e-05,
+ "loss": 0.8755,
+ "step": 1727
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5572855149200637e-05,
+ "loss": 0.9165,
+ "step": 1728
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5567679438973543e-05,
+ "loss": 0.9501,
+ "step": 1729
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5562501566323906e-05,
+ "loss": 0.9016,
+ "step": 1730
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.555732153326276e-05,
+ "loss": 0.9402,
+ "step": 1731
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5552139341801965e-05,
+ "loss": 0.8856,
+ "step": 1732
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.554695499395423e-05,
+ "loss": 0.8768,
+ "step": 1733
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5541768491733092e-05,
+ "loss": 0.9014,
+ "step": 1734
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5536579837152927e-05,
+ "loss": 0.8418,
+ "step": 1735
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5531389032228955e-05,
+ "loss": 0.8946,
+ "step": 1736
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.552619607897722e-05,
+ "loss": 0.9021,
+ "step": 1737
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.55210009794146e-05,
+ "loss": 0.8611,
+ "step": 1738
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5515803735558827e-05,
+ "loss": 0.9054,
+ "step": 1739
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5510604349428438e-05,
+ "loss": 0.9597,
+ "step": 1740
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.550540282304282e-05,
+ "loss": 0.8706,
+ "step": 1741
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.550019915842218e-05,
+ "loss": 0.9222,
+ "step": 1742
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.549499335758757e-05,
+ "loss": 0.9049,
+ "step": 1743
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.548978542256086e-05,
+ "loss": 0.9216,
+ "step": 1744
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5484575355364744e-05,
+ "loss": 0.906,
+ "step": 1745
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5479363158022763e-05,
+ "loss": 0.8817,
+ "step": 1746
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.547414883255927e-05,
+ "loss": 0.8898,
+ "step": 1747
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.546893238099945e-05,
+ "loss": 0.8456,
+ "step": 1748
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5463713805369312e-05,
+ "loss": 0.8614,
+ "step": 1749
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5458493107695688e-05,
+ "loss": 0.8622,
+ "step": 1750
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5453270290006237e-05,
+ "loss": 0.8739,
+ "step": 1751
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.544804535432945e-05,
+ "loss": 0.9129,
+ "step": 1752
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.544281830269462e-05,
+ "loss": 0.8913,
+ "step": 1753
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5437589137131882e-05,
+ "loss": 0.344,
+ "step": 1754
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5432357859672177e-05,
+ "loss": 0.3343,
+ "step": 1755
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.542712447234728e-05,
+ "loss": 0.9442,
+ "step": 1756
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.542188897718977e-05,
+ "loss": 0.8802,
+ "step": 1757
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5416651376233062e-05,
+ "loss": 0.9224,
+ "step": 1758
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5411411671511376e-05,
+ "loss": 0.3322,
+ "step": 1759
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5406169865059747e-05,
+ "loss": 0.8669,
+ "step": 1760
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5400925958914045e-05,
+ "loss": 0.8587,
+ "step": 1761
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5395679955110927e-05,
+ "loss": 0.9005,
+ "step": 1762
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.53904318556879e-05,
+ "loss": 0.894,
+ "step": 1763
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5385181662683244e-05,
+ "loss": 0.9441,
+ "step": 1764
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5379929378136088e-05,
+ "loss": 0.8866,
+ "step": 1765
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5374675004086353e-05,
+ "loss": 0.8596,
+ "step": 1766
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5369418542574782e-05,
+ "loss": 0.9463,
+ "step": 1767
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.536415999564292e-05,
+ "loss": 0.9096,
+ "step": 1768
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5358899365333123e-05,
+ "loss": 0.9065,
+ "step": 1769
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5353636653688563e-05,
+ "loss": 0.9263,
+ "step": 1770
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.534837186275322e-05,
+ "loss": 0.9296,
+ "step": 1771
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5343104994571877e-05,
+ "loss": 0.9103,
+ "step": 1772
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.533783605119012e-05,
+ "loss": 0.9263,
+ "step": 1773
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5332565034654344e-05,
+ "loss": 0.8902,
+ "step": 1774
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5327291947011763e-05,
+ "loss": 0.8487,
+ "step": 1775
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5322016790310373e-05,
+ "loss": 0.9495,
+ "step": 1776
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5316739566598985e-05,
+ "loss": 0.8295,
+ "step": 1777
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.531146027792722e-05,
+ "loss": 0.8741,
+ "step": 1778
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.530617892634548e-05,
+ "loss": 0.879,
+ "step": 1779
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5300895513904993e-05,
+ "loss": 0.8716,
+ "step": 1780
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.529561004265777e-05,
+ "loss": 0.3501,
+ "step": 1781
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5290322514656624e-05,
+ "loss": 0.9305,
+ "step": 1782
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5285032931955177e-05,
+ "loss": 0.8902,
+ "step": 1783
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.527974129660784e-05,
+ "loss": 0.8797,
+ "step": 1784
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.527444761066982e-05,
+ "loss": 0.9201,
+ "step": 1785
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5269151876197127e-05,
+ "loss": 0.8549,
+ "step": 1786
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5263854095246557e-05,
+ "loss": 0.9351,
+ "step": 1787
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5258554269875716e-05,
+ "loss": 0.9333,
+ "step": 1788
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5253252402142989e-05,
+ "loss": 0.914,
+ "step": 1789
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5247948494107566e-05,
+ "loss": 0.9352,
+ "step": 1790
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5242642547829416e-05,
+ "loss": 0.9375,
+ "step": 1791
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.523733456536931e-05,
+ "loss": 0.8714,
+ "step": 1792
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5232024548788813e-05,
+ "loss": 0.9665,
+ "step": 1793
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5226712500150267e-05,
+ "loss": 0.8637,
+ "step": 1794
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5221398421516816e-05,
+ "loss": 0.9001,
+ "step": 1795
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5216082314952383e-05,
+ "loss": 0.8326,
+ "step": 1796
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.521076418252168e-05,
+ "loss": 0.9145,
+ "step": 1797
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5205444026290218e-05,
+ "loss": 0.9409,
+ "step": 1798
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5200121848324276e-05,
+ "loss": 0.899,
+ "step": 1799
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5194797650690926e-05,
+ "loss": 0.893,
+ "step": 1800
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5189471435458032e-05,
+ "loss": 0.9221,
+ "step": 1801
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5184143204694231e-05,
+ "loss": 0.7968,
+ "step": 1802
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5178812960468945e-05,
+ "loss": 0.9386,
+ "step": 1803
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5173480704852379e-05,
+ "loss": 0.8234,
+ "step": 1804
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5168146439915525e-05,
+ "loss": 0.8813,
+ "step": 1805
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5162810167730144e-05,
+ "loss": 0.8989,
+ "step": 1806
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5157471890368785e-05,
+ "loss": 0.9244,
+ "step": 1807
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5152131609904773e-05,
+ "loss": 0.9251,
+ "step": 1808
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5146789328412213e-05,
+ "loss": 0.8993,
+ "step": 1809
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5141445047965984e-05,
+ "loss": 0.342,
+ "step": 1810
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5136098770641741e-05,
+ "loss": 0.9025,
+ "step": 1811
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.513075049851592e-05,
+ "loss": 0.3408,
+ "step": 1812
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5125400233665728e-05,
+ "loss": 0.8834,
+ "step": 1813
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5120047978169146e-05,
+ "loss": 0.8897,
+ "step": 1814
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5114693734104926e-05,
+ "loss": 0.8895,
+ "step": 1815
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5109337503552594e-05,
+ "loss": 0.9055,
+ "step": 1816
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5103979288592454e-05,
+ "loss": 0.8434,
+ "step": 1817
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5098619091305571e-05,
+ "loss": 0.9234,
+ "step": 1818
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5093256913773786e-05,
+ "loss": 0.7853,
+ "step": 1819
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.50878927580797e-05,
+ "loss": 0.9126,
+ "step": 1820
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5082526626306698e-05,
+ "loss": 0.3308,
+ "step": 1821
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5077158520538921e-05,
+ "loss": 0.821,
+ "step": 1822
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5071788442861277e-05,
+ "loss": 0.8598,
+ "step": 1823
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5066416395359444e-05,
+ "loss": 0.8984,
+ "step": 1824
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5061042380119864e-05,
+ "loss": 0.8945,
+ "step": 1825
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5055666399229743e-05,
+ "loss": 0.9365,
+ "step": 1826
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5050288454777047e-05,
+ "loss": 0.3325,
+ "step": 1827
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.504490854885051e-05,
+ "loss": 0.9344,
+ "step": 1828
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5039526683539627e-05,
+ "loss": 0.961,
+ "step": 1829
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5034142860934649e-05,
+ "loss": 0.8653,
+ "step": 1830
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5028757083126594e-05,
+ "loss": 0.8737,
+ "step": 1831
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5023369352207229e-05,
+ "loss": 0.8489,
+ "step": 1832
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5017979670269096e-05,
+ "loss": 0.8895,
+ "step": 1833
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.501258803940548e-05,
+ "loss": 0.9791,
+ "step": 1834
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.500719446171043e-05,
+ "loss": 0.853,
+ "step": 1835
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.500179893927875e-05,
+ "loss": 0.8926,
+ "step": 1836
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4996401474205997e-05,
+ "loss": 0.3301,
+ "step": 1837
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4991002068588484e-05,
+ "loss": 0.9411,
+ "step": 1838
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4985600724523282e-05,
+ "loss": 0.9024,
+ "step": 1839
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4980197444108205e-05,
+ "loss": 0.9483,
+ "step": 1840
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4974792229441826e-05,
+ "loss": 0.9167,
+ "step": 1841
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4969385082623473e-05,
+ "loss": 0.9055,
+ "step": 1842
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4963976005753216e-05,
+ "loss": 0.8377,
+ "step": 1843
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4958565000931877e-05,
+ "loss": 0.9333,
+ "step": 1844
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4953152070261027e-05,
+ "loss": 0.8977,
+ "step": 1845
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.494773721584299e-05,
+ "loss": 0.9427,
+ "step": 1846
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4942320439780833e-05,
+ "loss": 0.8719,
+ "step": 1847
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4936901744178367e-05,
+ "loss": 0.8691,
+ "step": 1848
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4931481131140149e-05,
+ "loss": 0.8912,
+ "step": 1849
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4926058602771484e-05,
+ "loss": 0.356,
+ "step": 1850
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4920634161178424e-05,
+ "loss": 0.8958,
+ "step": 1851
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4915207808467756e-05,
+ "loss": 0.8454,
+ "step": 1852
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4909779546747011e-05,
+ "loss": 0.9246,
+ "step": 1853
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4904349378124467e-05,
+ "loss": 0.8342,
+ "step": 1854
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.489891730470914e-05,
+ "loss": 0.913,
+ "step": 1855
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4893483328610778e-05,
+ "loss": 0.8311,
+ "step": 1856
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.488804745193988e-05,
+ "loss": 0.8267,
+ "step": 1857
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4882609676807675e-05,
+ "loss": 0.8444,
+ "step": 1858
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4877170005326136e-05,
+ "loss": 0.8312,
+ "step": 1859
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4871728439607967e-05,
+ "loss": 0.9051,
+ "step": 1860
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4866284981766607e-05,
+ "loss": 0.9088,
+ "step": 1861
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4860839633916236e-05,
+ "loss": 0.8904,
+ "step": 1862
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4855392398171762e-05,
+ "loss": 0.9397,
+ "step": 1863
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.484994327664883e-05,
+ "loss": 0.8327,
+ "step": 1864
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4844492271463814e-05,
+ "loss": 0.9035,
+ "step": 1865
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4839039384733821e-05,
+ "loss": 0.8804,
+ "step": 1866
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4833584618576695e-05,
+ "loss": 0.8932,
+ "step": 1867
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4828127975111e-05,
+ "loss": 0.9195,
+ "step": 1868
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4822669456456031e-05,
+ "loss": 0.8813,
+ "step": 1869
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4817209064731819e-05,
+ "loss": 0.9215,
+ "step": 1870
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4811746802059115e-05,
+ "loss": 0.9449,
+ "step": 1871
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.48062826705594e-05,
+ "loss": 0.8442,
+ "step": 1872
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4800816672354876e-05,
+ "loss": 0.8994,
+ "step": 1873
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4795348809568477e-05,
+ "loss": 0.8873,
+ "step": 1874
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4789879084323858e-05,
+ "loss": 0.8457,
+ "step": 1875
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4784407498745394e-05,
+ "loss": 0.9257,
+ "step": 1876
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.477893405495819e-05,
+ "loss": 0.9089,
+ "step": 1877
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4773458755088068e-05,
+ "loss": 0.9125,
+ "step": 1878
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4767981601261567e-05,
+ "loss": 0.9097,
+ "step": 1879
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4762502595605957e-05,
+ "loss": 0.877,
+ "step": 1880
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4757021740249213e-05,
+ "loss": 0.8929,
+ "step": 1881
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4751539037320044e-05,
+ "loss": 0.877,
+ "step": 1882
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4746054488947863e-05,
+ "loss": 0.9186,
+ "step": 1883
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4740568097262811e-05,
+ "loss": 0.9156,
+ "step": 1884
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.473507986439573e-05,
+ "loss": 0.8942,
+ "step": 1885
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4729589792478193e-05,
+ "loss": 0.9098,
+ "step": 1886
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4724097883642482e-05,
+ "loss": 0.9079,
+ "step": 1887
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4718604140021588e-05,
+ "loss": 0.8696,
+ "step": 1888
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.471310856374922e-05,
+ "loss": 0.8806,
+ "step": 1889
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.470761115695979e-05,
+ "loss": 0.8841,
+ "step": 1890
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4702111921788437e-05,
+ "loss": 0.3691,
+ "step": 1891
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4696610860370997e-05,
+ "loss": 0.8541,
+ "step": 1892
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4691107974844015e-05,
+ "loss": 0.8623,
+ "step": 1893
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.468560326734475e-05,
+ "loss": 0.9252,
+ "step": 1894
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4680096740011172e-05,
+ "loss": 0.9047,
+ "step": 1895
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4674588394981948e-05,
+ "loss": 0.8317,
+ "step": 1896
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4669078234396454e-05,
+ "loss": 0.8868,
+ "step": 1897
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4663566260394775e-05,
+ "loss": 0.9246,
+ "step": 1898
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4658052475117704e-05,
+ "loss": 0.9418,
+ "step": 1899
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4652536880706723e-05,
+ "loss": 0.977,
+ "step": 1900
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4647019479304028e-05,
+ "loss": 0.935,
+ "step": 1901
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4641500273052516e-05,
+ "loss": 0.881,
+ "step": 1902
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.463597926409578e-05,
+ "loss": 0.8564,
+ "step": 1903
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4630456454578122e-05,
+ "loss": 0.9335,
+ "step": 1904
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.462493184664453e-05,
+ "loss": 0.8867,
+ "step": 1905
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4619405442440702e-05,
+ "loss": 0.8895,
+ "step": 1906
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4613877244113033e-05,
+ "loss": 0.8933,
+ "step": 1907
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4608347253808605e-05,
+ "loss": 0.3472,
+ "step": 1908
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.460281547367521e-05,
+ "loss": 0.9395,
+ "step": 1909
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4597281905861318e-05,
+ "loss": 0.9227,
+ "step": 1910
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4591746552516109e-05,
+ "loss": 0.3281,
+ "step": 1911
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4586209415789452e-05,
+ "loss": 0.8451,
+ "step": 1912
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4580670497831904e-05,
+ "loss": 0.9553,
+ "step": 1913
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4575129800794718e-05,
+ "loss": 0.9058,
+ "step": 1914
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4569587326829834e-05,
+ "loss": 0.9239,
+ "step": 1915
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4564043078089891e-05,
+ "loss": 0.8449,
+ "step": 1916
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4558497056728205e-05,
+ "loss": 0.9244,
+ "step": 1917
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4552949264898795e-05,
+ "loss": 0.8445,
+ "step": 1918
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4547399704756348e-05,
+ "loss": 0.8401,
+ "step": 1919
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4541848378456255e-05,
+ "loss": 0.8877,
+ "step": 1920
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4536295288154594e-05,
+ "loss": 0.9163,
+ "step": 1921
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4530740436008111e-05,
+ "loss": 0.8836,
+ "step": 1922
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.452518382417425e-05,
+ "loss": 0.9343,
+ "step": 1923
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4519625454811135e-05,
+ "loss": 0.8714,
+ "step": 1924
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4514065330077575e-05,
+ "loss": 0.9157,
+ "step": 1925
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4508503452133053e-05,
+ "loss": 0.8121,
+ "step": 1926
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4502939823137744e-05,
+ "loss": 0.9604,
+ "step": 1927
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4497374445252496e-05,
+ "loss": 0.8782,
+ "step": 1928
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4491807320638835e-05,
+ "loss": 0.9134,
+ "step": 1929
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4486238451458972e-05,
+ "loss": 0.8633,
+ "step": 1930
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4480667839875786e-05,
+ "loss": 0.9408,
+ "step": 1931
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4475095488052843e-05,
+ "loss": 0.8734,
+ "step": 1932
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4469521398154381e-05,
+ "loss": 0.927,
+ "step": 1933
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4463945572345308e-05,
+ "loss": 0.898,
+ "step": 1934
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4458368012791213e-05,
+ "loss": 0.3371,
+ "step": 1935
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4452788721658355e-05,
+ "loss": 0.9782,
+ "step": 1936
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4447207701113669e-05,
+ "loss": 0.8966,
+ "step": 1937
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4441624953324755e-05,
+ "loss": 0.9522,
+ "step": 1938
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4436040480459891e-05,
+ "loss": 0.9068,
+ "step": 1939
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.443045428468802e-05,
+ "loss": 0.8675,
+ "step": 1940
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4424866368178761e-05,
+ "loss": 0.8399,
+ "step": 1941
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.441927673310239e-05,
+ "loss": 0.8841,
+ "step": 1942
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4413685381629855e-05,
+ "loss": 0.9024,
+ "step": 1943
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.440809231593278e-05,
+ "loss": 0.8847,
+ "step": 1944
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4402497538183444e-05,
+ "loss": 0.8777,
+ "step": 1945
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4396901050554794e-05,
+ "loss": 0.9327,
+ "step": 1946
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4391302855220442e-05,
+ "loss": 0.8859,
+ "step": 1947
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4385702954354662e-05,
+ "loss": 0.8798,
+ "step": 1948
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.438010135013239e-05,
+ "loss": 0.9352,
+ "step": 1949
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4374498044729225e-05,
+ "loss": 0.3367,
+ "step": 1950
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4368893040321428e-05,
+ "loss": 0.9483,
+ "step": 1951
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4363286339085915e-05,
+ "loss": 0.8883,
+ "step": 1952
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.435767794320027e-05,
+ "loss": 0.9052,
+ "step": 1953
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4352067854842724e-05,
+ "loss": 0.8696,
+ "step": 1954
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.434645607619217e-05,
+ "loss": 0.9393,
+ "step": 1955
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.434084260942816e-05,
+ "loss": 0.9083,
+ "step": 1956
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4335227456730902e-05,
+ "loss": 0.3333,
+ "step": 1957
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4329610620281253e-05,
+ "loss": 1.0002,
+ "step": 1958
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4323992102260733e-05,
+ "loss": 0.8595,
+ "step": 1959
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4318371904851502e-05,
+ "loss": 0.8919,
+ "step": 1960
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4312750030236382e-05,
+ "loss": 0.8652,
+ "step": 1961
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4307126480598852e-05,
+ "loss": 0.8431,
+ "step": 1962
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4301501258123024e-05,
+ "loss": 0.9213,
+ "step": 1963
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4295874364993672e-05,
+ "loss": 0.9258,
+ "step": 1964
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4290245803396221e-05,
+ "loss": 0.9051,
+ "step": 1965
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4284615575516737e-05,
+ "loss": 0.8925,
+ "step": 1966
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4278983683541934e-05,
+ "loss": 0.3409,
+ "step": 1967
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4273350129659173e-05,
+ "loss": 0.872,
+ "step": 1968
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4267714916056465e-05,
+ "loss": 0.8765,
+ "step": 1969
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.426207804492246e-05,
+ "loss": 0.8701,
+ "step": 1970
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4256439518446456e-05,
+ "loss": 0.3227,
+ "step": 1971
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4250799338818388e-05,
+ "loss": 0.8665,
+ "step": 1972
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.424515750822884e-05,
+ "loss": 0.8541,
+ "step": 1973
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4239514028869032e-05,
+ "loss": 0.9197,
+ "step": 1974
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4233868902930827e-05,
+ "loss": 0.8156,
+ "step": 1975
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4228222132606729e-05,
+ "loss": 0.8795,
+ "step": 1976
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4222573720089874e-05,
+ "loss": 0.9272,
+ "step": 1977
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4216923667574042e-05,
+ "loss": 0.8261,
+ "step": 1978
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4211271977253653e-05,
+ "loss": 0.9354,
+ "step": 1979
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4205618651323753e-05,
+ "loss": 0.9077,
+ "step": 1980
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4199963691980027e-05,
+ "loss": 0.8562,
+ "step": 1981
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4194307101418805e-05,
+ "loss": 0.917,
+ "step": 1982
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4188648881837033e-05,
+ "loss": 0.919,
+ "step": 1983
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4182989035432299e-05,
+ "loss": 0.8722,
+ "step": 1984
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4177327564402825e-05,
+ "loss": 0.8983,
+ "step": 1985
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4171664470947464e-05,
+ "loss": 0.9448,
+ "step": 1986
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.416599975726569e-05,
+ "loss": 0.9818,
+ "step": 1987
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4160333425557616e-05,
+ "loss": 0.8398,
+ "step": 1988
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4154665478023977e-05,
+ "loss": 0.8986,
+ "step": 1989
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4148995916866139e-05,
+ "loss": 0.8588,
+ "step": 1990
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.41433247442861e-05,
+ "loss": 0.81,
+ "step": 1991
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4137651962486472e-05,
+ "loss": 0.3643,
+ "step": 1992
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4131977573670499e-05,
+ "loss": 0.931,
+ "step": 1993
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.412630158004205e-05,
+ "loss": 0.9019,
+ "step": 1994
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4120623983805617e-05,
+ "loss": 0.9074,
+ "step": 1995
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4114944787166307e-05,
+ "loss": 0.9205,
+ "step": 1996
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4109263992329858e-05,
+ "loss": 0.9037,
+ "step": 1997
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4103581601502629e-05,
+ "loss": 0.3415,
+ "step": 1998
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.409789761689159e-05,
+ "loss": 0.8838,
+ "step": 1999
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4092212040704336e-05,
+ "loss": 0.8955,
+ "step": 2000
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.408652487514908e-05,
+ "loss": 0.9042,
+ "step": 2001
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.408083612243465e-05,
+ "loss": 0.3112,
+ "step": 2002
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4075145784770496e-05,
+ "loss": 0.9066,
+ "step": 2003
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4069453864366678e-05,
+ "loss": 0.9343,
+ "step": 2004
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4063760363433867e-05,
+ "loss": 0.3589,
+ "step": 2005
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.405806528418336e-05,
+ "loss": 0.8626,
+ "step": 2006
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4052368628827057e-05,
+ "loss": 0.9043,
+ "step": 2007
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4046670399577478e-05,
+ "loss": 0.9,
+ "step": 2008
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4040970598647742e-05,
+ "loss": 0.344,
+ "step": 2009
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4035269228251589e-05,
+ "loss": 0.8871,
+ "step": 2010
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4029566290603368e-05,
+ "loss": 0.8177,
+ "step": 2011
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4023861787918031e-05,
+ "loss": 0.9115,
+ "step": 2012
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4018155722411144e-05,
+ "loss": 0.8899,
+ "step": 2013
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4012448096298874e-05,
+ "loss": 0.9085,
+ "step": 2014
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4006738911798001e-05,
+ "loss": 0.8505,
+ "step": 2015
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.40010281711259e-05,
+ "loss": 0.8961,
+ "step": 2016
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3995315876500565e-05,
+ "loss": 0.9091,
+ "step": 2017
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3989602030140581e-05,
+ "loss": 0.9234,
+ "step": 2018
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.398388663426514e-05,
+ "loss": 0.8828,
+ "step": 2019
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3978169691094037e-05,
+ "loss": 0.8319,
+ "step": 2020
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3972451202847665e-05,
+ "loss": 0.9261,
+ "step": 2021
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3966731171747024e-05,
+ "loss": 0.8725,
+ "step": 2022
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3961009600013702e-05,
+ "loss": 0.8844,
+ "step": 2023
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3955286489869894e-05,
+ "loss": 0.8844,
+ "step": 2024
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.394956184353839e-05,
+ "loss": 0.8217,
+ "step": 2025
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3943835663242577e-05,
+ "loss": 0.9053,
+ "step": 2026
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3938107951206438e-05,
+ "loss": 0.8662,
+ "step": 2027
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3932378709654548e-05,
+ "loss": 0.8742,
+ "step": 2028
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3926647940812081e-05,
+ "loss": 0.889,
+ "step": 2029
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.39209156469048e-05,
+ "loss": 0.8695,
+ "step": 2030
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3915181830159061e-05,
+ "loss": 0.9434,
+ "step": 2031
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3909446492801819e-05,
+ "loss": 0.8806,
+ "step": 2032
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3903709637060605e-05,
+ "loss": 0.9066,
+ "step": 2033
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3897971265163546e-05,
+ "loss": 0.8069,
+ "step": 2034
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3892231379339369e-05,
+ "loss": 0.8629,
+ "step": 2035
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3886489981817375e-05,
+ "loss": 0.9743,
+ "step": 2036
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3880747074827454e-05,
+ "loss": 0.8458,
+ "step": 2037
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3875002660600085e-05,
+ "loss": 0.8798,
+ "step": 2038
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.386925674136634e-05,
+ "loss": 0.3686,
+ "step": 2039
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3863509319357857e-05,
+ "loss": 0.9186,
+ "step": 2040
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3857760396806876e-05,
+ "loss": 0.2869,
+ "step": 2041
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3852009975946209e-05,
+ "loss": 0.9297,
+ "step": 2042
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3846258059009252e-05,
+ "loss": 0.8997,
+ "step": 2043
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.384050464822999e-05,
+ "loss": 0.9275,
+ "step": 2044
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.383474974584297e-05,
+ "loss": 0.8687,
+ "step": 2045
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3828993354083342e-05,
+ "loss": 0.8515,
+ "step": 2046
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3823235475186816e-05,
+ "loss": 0.9319,
+ "step": 2047
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3817476111389685e-05,
+ "loss": 0.8814,
+ "step": 2048
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3811715264928824e-05,
+ "loss": 0.9526,
+ "step": 2049
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3805952938041674e-05,
+ "loss": 0.9228,
+ "step": 2050
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3800189132966257e-05,
+ "loss": 0.921,
+ "step": 2051
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3794423851941174e-05,
+ "loss": 0.8548,
+ "step": 2052
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.378865709720559e-05,
+ "loss": 0.359,
+ "step": 2053
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3782888870999245e-05,
+ "loss": 0.879,
+ "step": 2054
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.377711917556245e-05,
+ "loss": 0.8598,
+ "step": 2055
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3771348013136096e-05,
+ "loss": 0.9131,
+ "step": 2056
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3765575385961627e-05,
+ "loss": 0.8867,
+ "step": 2057
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3759801296281072e-05,
+ "loss": 0.3295,
+ "step": 2058
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3754025746337014e-05,
+ "loss": 0.8873,
+ "step": 2059
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3748248738372616e-05,
+ "loss": 0.893,
+ "step": 2060
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3742470274631599e-05,
+ "loss": 0.9075,
+ "step": 2061
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3736690357358253e-05,
+ "loss": 0.9009,
+ "step": 2062
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3730908988797427e-05,
+ "loss": 0.8598,
+ "step": 2063
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3725126171194543e-05,
+ "loss": 0.8945,
+ "step": 2064
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.371934190679558e-05,
+ "loss": 0.9255,
+ "step": 2065
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3713556197847076e-05,
+ "loss": 0.8482,
+ "step": 2066
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3707769046596136e-05,
+ "loss": 0.8548,
+ "step": 2067
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3701980455290425e-05,
+ "loss": 0.8895,
+ "step": 2068
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3696190426178162e-05,
+ "loss": 0.8559,
+ "step": 2069
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3690398961508128e-05,
+ "loss": 0.3352,
+ "step": 2070
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3684606063529662e-05,
+ "loss": 0.3463,
+ "step": 2071
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3678811734492659e-05,
+ "loss": 0.9215,
+ "step": 2072
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.367301597664757e-05,
+ "loss": 0.8368,
+ "step": 2073
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.36672187922454e-05,
+ "loss": 0.9162,
+ "step": 2074
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3661420183537705e-05,
+ "loss": 0.9218,
+ "step": 2075
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3655620152776605e-05,
+ "loss": 0.8654,
+ "step": 2076
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.364981870221476e-05,
+ "loss": 0.8559,
+ "step": 2077
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.364401583410539e-05,
+ "loss": 0.8737,
+ "step": 2078
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3638211550702256e-05,
+ "loss": 0.9215,
+ "step": 2079
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.363240585425968e-05,
+ "loss": 0.875,
+ "step": 2080
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.362659874703253e-05,
+ "loss": 0.8382,
+ "step": 2081
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3620790231276213e-05,
+ "loss": 0.9178,
+ "step": 2082
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3614980309246692e-05,
+ "loss": 0.841,
+ "step": 2083
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3609168983200474e-05,
+ "loss": 0.9028,
+ "step": 2084
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3603356255394613e-05,
+ "loss": 0.8932,
+ "step": 2085
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3597542128086702e-05,
+ "loss": 0.9153,
+ "step": 2086
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3591726603534885e-05,
+ "loss": 0.919,
+ "step": 2087
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3585909683997842e-05,
+ "loss": 0.9014,
+ "step": 2088
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3580091371734798e-05,
+ "loss": 0.8677,
+ "step": 2089
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.357427166900552e-05,
+ "loss": 0.8769,
+ "step": 2090
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3568450578070309e-05,
+ "loss": 0.9164,
+ "step": 2091
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3562628101190015e-05,
+ "loss": 0.9132,
+ "step": 2092
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3556804240626019e-05,
+ "loss": 0.9407,
+ "step": 2093
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3550978998640241e-05,
+ "loss": 0.8397,
+ "step": 2094
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3545152377495136e-05,
+ "loss": 0.9516,
+ "step": 2095
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3539324379453698e-05,
+ "loss": 0.889,
+ "step": 2096
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3533495006779455e-05,
+ "loss": 0.8179,
+ "step": 2097
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3527664261736471e-05,
+ "loss": 0.9019,
+ "step": 2098
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3521832146589335e-05,
+ "loss": 0.9463,
+ "step": 2099
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3515998663603174e-05,
+ "loss": 0.8962,
+ "step": 2100
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3510163815043647e-05,
+ "loss": 0.8493,
+ "step": 2101
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3504327603176943e-05,
+ "loss": 0.7998,
+ "step": 2102
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3498490030269782e-05,
+ "loss": 0.8454,
+ "step": 2103
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3492651098589398e-05,
+ "loss": 0.9641,
+ "step": 2104
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3486810810403578e-05,
+ "loss": 0.9498,
+ "step": 2105
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.348096916798062e-05,
+ "loss": 0.874,
+ "step": 2106
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3475126173589343e-05,
+ "loss": 0.8762,
+ "step": 2107
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3469281829499107e-05,
+ "loss": 0.8726,
+ "step": 2108
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3463436137979786e-05,
+ "loss": 0.9508,
+ "step": 2109
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3457589101301776e-05,
+ "loss": 0.8612,
+ "step": 2110
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3451740721736005e-05,
+ "loss": 0.8746,
+ "step": 2111
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3445891001553905e-05,
+ "loss": 0.8859,
+ "step": 2112
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3440039943027452e-05,
+ "loss": 0.3673,
+ "step": 2113
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3434187548429126e-05,
+ "loss": 0.9588,
+ "step": 2114
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3428333820031922e-05,
+ "loss": 0.8508,
+ "step": 2115
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3422478760109371e-05,
+ "loss": 0.3524,
+ "step": 2116
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3416622370935507e-05,
+ "loss": 0.9202,
+ "step": 2117
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3410764654784885e-05,
+ "loss": 0.8818,
+ "step": 2118
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3404905613932573e-05,
+ "loss": 0.8155,
+ "step": 2119
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3399045250654152e-05,
+ "loss": 0.8723,
+ "step": 2120
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3393183567225724e-05,
+ "loss": 0.9067,
+ "step": 2121
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3387320565923901e-05,
+ "loss": 0.8955,
+ "step": 2122
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.33814562490258e-05,
+ "loss": 0.8651,
+ "step": 2123
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3375590618809056e-05,
+ "loss": 0.8972,
+ "step": 2124
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3369723677551813e-05,
+ "loss": 0.8249,
+ "step": 2125
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3363855427532724e-05,
+ "loss": 0.3413,
+ "step": 2126
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3357985871030948e-05,
+ "loss": 0.9409,
+ "step": 2127
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3352115010326155e-05,
+ "loss": 0.8396,
+ "step": 2128
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3346242847698516e-05,
+ "loss": 0.9014,
+ "step": 2129
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3340369385428713e-05,
+ "loss": 0.9671,
+ "step": 2130
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3334494625797936e-05,
+ "loss": 0.9021,
+ "step": 2131
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3328618571087867e-05,
+ "loss": 0.8415,
+ "step": 2132
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.33227412235807e-05,
+ "loss": 0.9408,
+ "step": 2133
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3316862585559132e-05,
+ "loss": 0.8912,
+ "step": 2134
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3310982659306352e-05,
+ "loss": 0.9067,
+ "step": 2135
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3305101447106064e-05,
+ "loss": 0.878,
+ "step": 2136
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3299218951242456e-05,
+ "loss": 0.8873,
+ "step": 2137
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3293335174000226e-05,
+ "loss": 0.9478,
+ "step": 2138
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.328745011766456e-05,
+ "loss": 0.9111,
+ "step": 2139
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3281563784521154e-05,
+ "loss": 0.8652,
+ "step": 2140
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3275676176856185e-05,
+ "loss": 0.9174,
+ "step": 2141
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3269787296956333e-05,
+ "loss": 0.8871,
+ "step": 2142
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3263897147108778e-05,
+ "loss": 0.8863,
+ "step": 2143
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3258005729601178e-05,
+ "loss": 0.8067,
+ "step": 2144
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3252113046721692e-05,
+ "loss": 0.8547,
+ "step": 2145
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3246219100758974e-05,
+ "loss": 0.8756,
+ "step": 2146
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3240323894002166e-05,
+ "loss": 0.9105,
+ "step": 2147
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3234427428740895e-05,
+ "loss": 0.9473,
+ "step": 2148
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3228529707265279e-05,
+ "loss": 0.8832,
+ "step": 2149
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.322263073186593e-05,
+ "loss": 0.8785,
+ "step": 2150
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3216730504833938e-05,
+ "loss": 0.8199,
+ "step": 2151
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3210829028460883e-05,
+ "loss": 0.901,
+ "step": 2152
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3204926305038832e-05,
+ "loss": 0.869,
+ "step": 2153
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3199022336860335e-05,
+ "loss": 0.9062,
+ "step": 2154
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3193117126218425e-05,
+ "loss": 0.8614,
+ "step": 2155
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3187210675406617e-05,
+ "loss": 0.9126,
+ "step": 2156
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.318130298671891e-05,
+ "loss": 0.9333,
+ "step": 2157
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3175394062449777e-05,
+ "loss": 0.8785,
+ "step": 2158
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3169483904894185e-05,
+ "loss": 0.8519,
+ "step": 2159
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3163572516347565e-05,
+ "loss": 0.903,
+ "step": 2160
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3157659899105835e-05,
+ "loss": 0.9312,
+ "step": 2161
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.315174605546538e-05,
+ "loss": 0.912,
+ "step": 2162
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3145830987723081e-05,
+ "loss": 0.8795,
+ "step": 2163
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3139914698176273e-05,
+ "loss": 0.8608,
+ "step": 2164
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3133997189122777e-05,
+ "loss": 0.8786,
+ "step": 2165
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3128078462860887e-05,
+ "loss": 0.8291,
+ "step": 2166
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3122158521689367e-05,
+ "loss": 0.8889,
+ "step": 2167
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3116237367907454e-05,
+ "loss": 0.8664,
+ "step": 2168
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3110315003814855e-05,
+ "loss": 0.8604,
+ "step": 2169
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3104391431711748e-05,
+ "loss": 0.8725,
+ "step": 2170
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.309846665389878e-05,
+ "loss": 0.8544,
+ "step": 2171
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.309254067267707e-05,
+ "loss": 0.8509,
+ "step": 2172
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3086613490348198e-05,
+ "loss": 0.898,
+ "step": 2173
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3080685109214208e-05,
+ "loss": 0.8706,
+ "step": 2174
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3074755531577628e-05,
+ "loss": 0.8691,
+ "step": 2175
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3068824759741428e-05,
+ "loss": 0.8378,
+ "step": 2176
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.306289279600905e-05,
+ "loss": 0.8933,
+ "step": 2177
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3056959642684404e-05,
+ "loss": 0.9406,
+ "step": 2178
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.305102530207186e-05,
+ "loss": 0.8409,
+ "step": 2179
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3045089776476246e-05,
+ "loss": 0.9124,
+ "step": 2180
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3039153068202853e-05,
+ "loss": 0.8653,
+ "step": 2181
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3033215179557424e-05,
+ "loss": 0.8899,
+ "step": 2182
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3027276112846172e-05,
+ "loss": 0.9423,
+ "step": 2183
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3021335870375763e-05,
+ "loss": 0.9004,
+ "step": 2184
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3015394454453316e-05,
+ "loss": 0.9119,
+ "step": 2185
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3009451867386411e-05,
+ "loss": 0.9051,
+ "step": 2186
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3003508111483077e-05,
+ "loss": 0.9389,
+ "step": 2187
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.29975631890518e-05,
+ "loss": 0.927,
+ "step": 2188
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2991617102401524e-05,
+ "loss": 0.8467,
+ "step": 2189
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2985669853841635e-05,
+ "loss": 0.8822,
+ "step": 2190
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.297972144568198e-05,
+ "loss": 0.8567,
+ "step": 2191
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2973771880232853e-05,
+ "loss": 0.869,
+ "step": 2192
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2967821159804994e-05,
+ "loss": 0.8298,
+ "step": 2193
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2961869286709594e-05,
+ "loss": 0.8769,
+ "step": 2194
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.295591626325829e-05,
+ "loss": 0.9496,
+ "step": 2195
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2949962091763174e-05,
+ "loss": 0.8732,
+ "step": 2196
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2944006774536773e-05,
+ "loss": 0.9003,
+ "step": 2197
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2938050313892062e-05,
+ "loss": 0.3477,
+ "step": 2198
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2932092712142468e-05,
+ "loss": 0.8843,
+ "step": 2199
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.292613397160185e-05,
+ "loss": 0.3639,
+ "step": 2200
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2920174094584514e-05,
+ "loss": 0.8033,
+ "step": 2201
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2914213083405211e-05,
+ "loss": 0.8932,
+ "step": 2202
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2908250940379124e-05,
+ "loss": 0.8685,
+ "step": 2203
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2902287667821885e-05,
+ "loss": 0.8218,
+ "step": 2204
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.289632326804956e-05,
+ "loss": 0.9003,
+ "step": 2205
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2890357743378649e-05,
+ "loss": 0.8658,
+ "step": 2206
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2884391096126098e-05,
+ "loss": 0.3369,
+ "step": 2207
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2878423328609281e-05,
+ "loss": 0.8542,
+ "step": 2208
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2872454443146015e-05,
+ "loss": 0.8544,
+ "step": 2209
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.286648444205454e-05,
+ "loss": 0.9076,
+ "step": 2210
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2860513327653537e-05,
+ "loss": 0.3334,
+ "step": 2211
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2854541102262119e-05,
+ "loss": 0.9436,
+ "step": 2212
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.284856776819983e-05,
+ "loss": 0.8395,
+ "step": 2213
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2842593327786649e-05,
+ "loss": 0.9054,
+ "step": 2214
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2836617783342968e-05,
+ "loss": 0.8798,
+ "step": 2215
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2830641137189628e-05,
+ "loss": 0.8432,
+ "step": 2216
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.282466339164789e-05,
+ "loss": 0.876,
+ "step": 2217
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2818684549039437e-05,
+ "loss": 0.9121,
+ "step": 2218
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2812704611686386e-05,
+ "loss": 0.9487,
+ "step": 2219
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2806723581911274e-05,
+ "loss": 0.889,
+ "step": 2220
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2800741462037065e-05,
+ "loss": 0.8988,
+ "step": 2221
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2794758254387147e-05,
+ "loss": 0.3435,
+ "step": 2222
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2788773961285323e-05,
+ "loss": 0.8443,
+ "step": 2223
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2782788585055829e-05,
+ "loss": 0.9681,
+ "step": 2224
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2776802128023317e-05,
+ "loss": 0.8569,
+ "step": 2225
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2770814592512853e-05,
+ "loss": 0.8902,
+ "step": 2226
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2764825980849931e-05,
+ "loss": 0.8468,
+ "step": 2227
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2758836295360455e-05,
+ "loss": 0.8717,
+ "step": 2228
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2752845538370752e-05,
+ "loss": 0.7939,
+ "step": 2229
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2746853712207567e-05,
+ "loss": 0.8697,
+ "step": 2230
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.274086081919805e-05,
+ "loss": 0.8292,
+ "step": 2231
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.273486686166977e-05,
+ "loss": 0.9003,
+ "step": 2232
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2728871841950719e-05,
+ "loss": 0.8565,
+ "step": 2233
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2722875762369288e-05,
+ "loss": 0.9289,
+ "step": 2234
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2716878625254287e-05,
+ "loss": 0.9051,
+ "step": 2235
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2710880432934934e-05,
+ "loss": 0.9049,
+ "step": 2236
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.270488118774086e-05,
+ "loss": 0.8711,
+ "step": 2237
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.26988808920021e-05,
+ "loss": 0.9489,
+ "step": 2238
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.26928795480491e-05,
+ "loss": 0.8633,
+ "step": 2239
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2686877158212715e-05,
+ "loss": 0.8904,
+ "step": 2240
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.26808737248242e-05,
+ "loss": 0.8486,
+ "step": 2241
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2674869250215225e-05,
+ "loss": 0.8207,
+ "step": 2242
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2668863736717855e-05,
+ "loss": 0.8879,
+ "step": 2243
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2662857186664558e-05,
+ "loss": 0.8753,
+ "step": 2244
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2656849602388222e-05,
+ "loss": 0.8731,
+ "step": 2245
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2650840986222111e-05,
+ "loss": 0.8443,
+ "step": 2246
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2644831340499906e-05,
+ "loss": 0.839,
+ "step": 2247
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2638820667555685e-05,
+ "loss": 0.9184,
+ "step": 2248
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2632808969723927e-05,
+ "loss": 0.8163,
+ "step": 2249
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.26267962493395e-05,
+ "loss": 0.9418,
+ "step": 2250
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2620782508737678e-05,
+ "loss": 0.8629,
+ "step": 2251
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2614767750254129e-05,
+ "loss": 0.3414,
+ "step": 2252
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2608751976224916e-05,
+ "loss": 0.8288,
+ "step": 2253
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2602735188986498e-05,
+ "loss": 0.9099,
+ "step": 2254
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2596717390875721e-05,
+ "loss": 0.8752,
+ "step": 2255
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2590698584229834e-05,
+ "loss": 0.9098,
+ "step": 2256
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2584678771386467e-05,
+ "loss": 0.9381,
+ "step": 2257
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2578657954683651e-05,
+ "loss": 0.88,
+ "step": 2258
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2572636136459799e-05,
+ "loss": 0.9191,
+ "step": 2259
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2566613319053713e-05,
+ "loss": 0.851,
+ "step": 2260
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2560589504804592e-05,
+ "loss": 0.9334,
+ "step": 2261
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2554564696052011e-05,
+ "loss": 0.9118,
+ "step": 2262
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2548538895135942e-05,
+ "loss": 0.33,
+ "step": 2263
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.254251210439673e-05,
+ "loss": 0.8842,
+ "step": 2264
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2536484326175114e-05,
+ "loss": 0.893,
+ "step": 2265
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2530455562812214e-05,
+ "loss": 0.8388,
+ "step": 2266
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.252442581664953e-05,
+ "loss": 0.8895,
+ "step": 2267
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2518395090028952e-05,
+ "loss": 0.946,
+ "step": 2268
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2512363385292739e-05,
+ "loss": 0.8648,
+ "step": 2269
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2506330704783533e-05,
+ "loss": 0.9077,
+ "step": 2270
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2500297050844367e-05,
+ "loss": 0.908,
+ "step": 2271
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2494262425818637e-05,
+ "loss": 0.9121,
+ "step": 2272
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2488226832050116e-05,
+ "loss": 0.9672,
+ "step": 2273
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2482190271882973e-05,
+ "loss": 0.8562,
+ "step": 2274
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2476152747661727e-05,
+ "loss": 0.8644,
+ "step": 2275
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2470114261731288e-05,
+ "loss": 0.8771,
+ "step": 2276
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.246407481643693e-05,
+ "loss": 0.8566,
+ "step": 2277
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.245803441412431e-05,
+ "loss": 0.8637,
+ "step": 2278
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2451993057139445e-05,
+ "loss": 0.882,
+ "step": 2279
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2445950747828732e-05,
+ "loss": 0.8815,
+ "step": 2280
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2439907488538934e-05,
+ "loss": 0.869,
+ "step": 2281
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.243386328161718e-05,
+ "loss": 0.9532,
+ "step": 2282
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2427818129410975e-05,
+ "loss": 0.8998,
+ "step": 2283
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2421772034268187e-05,
+ "loss": 0.8572,
+ "step": 2284
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2415724998537042e-05,
+ "loss": 0.8814,
+ "step": 2285
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2409677024566145e-05,
+ "loss": 0.9488,
+ "step": 2286
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.240362811470446e-05,
+ "loss": 0.8974,
+ "step": 2287
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2397578271301312e-05,
+ "loss": 0.9158,
+ "step": 2288
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2391527496706389e-05,
+ "loss": 0.8891,
+ "step": 2289
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2385475793269744e-05,
+ "loss": 0.8739,
+ "step": 2290
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2379423163341791e-05,
+ "loss": 0.8984,
+ "step": 2291
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2373369609273299e-05,
+ "loss": 0.8428,
+ "step": 2292
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2367315133415396e-05,
+ "loss": 0.8553,
+ "step": 2293
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2361259738119575e-05,
+ "loss": 0.8725,
+ "step": 2294
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2355203425737683e-05,
+ "loss": 0.8022,
+ "step": 2295
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2349146198621917e-05,
+ "loss": 0.8851,
+ "step": 2296
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2343088059124839e-05,
+ "loss": 0.8787,
+ "step": 2297
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2337029009599357e-05,
+ "loss": 0.3428,
+ "step": 2298
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2330969052398735e-05,
+ "loss": 0.9392,
+ "step": 2299
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2324908189876597e-05,
+ "loss": 0.8819,
+ "step": 2300
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2318846424386907e-05,
+ "loss": 0.9341,
+ "step": 2301
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2312783758283981e-05,
+ "loss": 0.9437,
+ "step": 2302
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.23067201939225e-05,
+ "loss": 0.972,
+ "step": 2303
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2300655733657475e-05,
+ "loss": 0.844,
+ "step": 2304
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2294590379844268e-05,
+ "loss": 0.8892,
+ "step": 2305
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2288524134838602e-05,
+ "loss": 0.9031,
+ "step": 2306
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2282457000996533e-05,
+ "loss": 0.9197,
+ "step": 2307
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2276388980674465e-05,
+ "loss": 0.8836,
+ "step": 2308
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.227032007622915e-05,
+ "loss": 0.8728,
+ "step": 2309
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2264250290017675e-05,
+ "loss": 0.8441,
+ "step": 2310
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2258179624397477e-05,
+ "loss": 0.7777,
+ "step": 2311
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2252108081726337e-05,
+ "loss": 0.8415,
+ "step": 2312
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.224603566436237e-05,
+ "loss": 0.9049,
+ "step": 2313
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2239962374664029e-05,
+ "loss": 0.8827,
+ "step": 2314
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2233888214990113e-05,
+ "loss": 0.3561,
+ "step": 2315
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2227813187699757e-05,
+ "loss": 0.8743,
+ "step": 2316
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.222173729515243e-05,
+ "loss": 0.8895,
+ "step": 2317
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2215660539707936e-05,
+ "loss": 0.9133,
+ "step": 2318
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2209582923726424e-05,
+ "loss": 0.8837,
+ "step": 2319
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2203504449568361e-05,
+ "loss": 0.9187,
+ "step": 2320
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2197425119594563e-05,
+ "loss": 0.9186,
+ "step": 2321
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.219134493616617e-05,
+ "loss": 0.8447,
+ "step": 2322
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2185263901644653e-05,
+ "loss": 0.8874,
+ "step": 2323
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.217918201839182e-05,
+ "loss": 0.9798,
+ "step": 2324
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2173099288769799e-05,
+ "loss": 0.913,
+ "step": 2325
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2167015715141057e-05,
+ "loss": 0.9146,
+ "step": 2326
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.216093129986838e-05,
+ "loss": 0.8392,
+ "step": 2327
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2154846045314885e-05,
+ "loss": 0.8755,
+ "step": 2328
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.214875995384402e-05,
+ "loss": 0.8648,
+ "step": 2329
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.214267302781955e-05,
+ "loss": 0.8526,
+ "step": 2330
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2136585269605558e-05,
+ "loss": 0.9036,
+ "step": 2331
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2130496681566475e-05,
+ "loss": 0.8257,
+ "step": 2332
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.212440726606703e-05,
+ "loss": 0.8487,
+ "step": 2333
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.211831702547228e-05,
+ "loss": 0.8688,
+ "step": 2334
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2112225962147605e-05,
+ "loss": 0.8857,
+ "step": 2335
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.210613407845871e-05,
+ "loss": 0.3028,
+ "step": 2336
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2100041376771605e-05,
+ "loss": 0.849,
+ "step": 2337
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.209394785945263e-05,
+ "loss": 0.8291,
+ "step": 2338
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2087853528868432e-05,
+ "loss": 0.8964,
+ "step": 2339
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2081758387385982e-05,
+ "loss": 0.8533,
+ "step": 2340
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2075662437372567e-05,
+ "loss": 0.9254,
+ "step": 2341
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2069565681195776e-05,
+ "loss": 0.34,
+ "step": 2342
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.206346812122352e-05,
+ "loss": 0.8912,
+ "step": 2343
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2057369759824025e-05,
+ "loss": 0.9265,
+ "step": 2344
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2051270599365825e-05,
+ "loss": 0.9092,
+ "step": 2345
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2045170642217756e-05,
+ "loss": 0.8999,
+ "step": 2346
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2039069890748978e-05,
+ "loss": 0.9081,
+ "step": 2347
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2032968347328952e-05,
+ "loss": 0.8721,
+ "step": 2348
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2026866014327446e-05,
+ "loss": 0.92,
+ "step": 2349
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2020762894114535e-05,
+ "loss": 0.8671,
+ "step": 2350
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.20146589890606e-05,
+ "loss": 0.902,
+ "step": 2351
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2008554301536328e-05,
+ "loss": 0.8555,
+ "step": 2352
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2002448833912712e-05,
+ "loss": 0.8908,
+ "step": 2353
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1996342588561042e-05,
+ "loss": 0.862,
+ "step": 2354
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1990235567852917e-05,
+ "loss": 0.8971,
+ "step": 2355
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1984127774160226e-05,
+ "loss": 0.8591,
+ "step": 2356
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1978019209855174e-05,
+ "loss": 0.8669,
+ "step": 2357
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1971909877310253e-05,
+ "loss": 0.807,
+ "step": 2358
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1965799778898258e-05,
+ "loss": 0.9165,
+ "step": 2359
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1959688916992279e-05,
+ "loss": 0.8866,
+ "step": 2360
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1953577293965707e-05,
+ "loss": 0.9545,
+ "step": 2361
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1947464912192228e-05,
+ "loss": 0.953,
+ "step": 2362
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1941351774045815e-05,
+ "loss": 0.3617,
+ "step": 2363
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1935237881900743e-05,
+ "loss": 0.8426,
+ "step": 2364
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1929123238131579e-05,
+ "loss": 0.8996,
+ "step": 2365
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1923007845113178e-05,
+ "loss": 0.9363,
+ "step": 2366
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1916891705220689e-05,
+ "loss": 0.792,
+ "step": 2367
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.191077482082955e-05,
+ "loss": 0.8611,
+ "step": 2368
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1904657194315486e-05,
+ "loss": 0.8716,
+ "step": 2369
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1898538828054517e-05,
+ "loss": 0.8647,
+ "step": 2370
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1892419724422946e-05,
+ "loss": 0.8368,
+ "step": 2371
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1886299885797357e-05,
+ "loss": 0.8444,
+ "step": 2372
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1880179314554629e-05,
+ "loss": 0.8683,
+ "step": 2373
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1874058013071923e-05,
+ "loss": 0.8961,
+ "step": 2374
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1867935983726676e-05,
+ "loss": 0.8572,
+ "step": 2375
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.186181322889662e-05,
+ "loss": 0.8506,
+ "step": 2376
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1855689750959759e-05,
+ "loss": 0.8712,
+ "step": 2377
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1849565552294379e-05,
+ "loss": 0.3329,
+ "step": 2378
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1843440635279056e-05,
+ "loss": 0.893,
+ "step": 2379
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1837315002292629e-05,
+ "loss": 0.8584,
+ "step": 2380
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1831188655714225e-05,
+ "loss": 0.876,
+ "step": 2381
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.182506159792325e-05,
+ "loss": 0.9164,
+ "step": 2382
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1818933831299381e-05,
+ "loss": 0.3203,
+ "step": 2383
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1812805358222571e-05,
+ "loss": 0.8582,
+ "step": 2384
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.180667618107305e-05,
+ "loss": 0.8843,
+ "step": 2385
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1800546302231317e-05,
+ "loss": 0.8675,
+ "step": 2386
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1794415724078147e-05,
+ "loss": 0.834,
+ "step": 2387
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1788284448994588e-05,
+ "loss": 0.8438,
+ "step": 2388
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1782152479361956e-05,
+ "loss": 0.8646,
+ "step": 2389
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1776019817561834e-05,
+ "loss": 0.9151,
+ "step": 2390
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1769886465976086e-05,
+ "loss": 0.8176,
+ "step": 2391
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1763752426986823e-05,
+ "loss": 0.9262,
+ "step": 2392
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1757617702976443e-05,
+ "loss": 0.832,
+ "step": 2393
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.17514822963276e-05,
+ "loss": 0.8366,
+ "step": 2394
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1745346209423216e-05,
+ "loss": 0.858,
+ "step": 2395
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1739209444646479e-05,
+ "loss": 0.3131,
+ "step": 2396
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1733072004380827e-05,
+ "loss": 0.9138,
+ "step": 2397
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1726933891009985e-05,
+ "loss": 0.847,
+ "step": 2398
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1720795106917917e-05,
+ "loss": 0.927,
+ "step": 2399
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.171465565448886e-05,
+ "loss": 0.8945,
+ "step": 2400
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1708515536107299e-05,
+ "loss": 0.8702,
+ "step": 2401
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1702374754157998e-05,
+ "loss": 0.9371,
+ "step": 2402
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1696233311025957e-05,
+ "loss": 0.8477,
+ "step": 2403
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1690091209096441e-05,
+ "loss": 0.8746,
+ "step": 2404
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1683948450754976e-05,
+ "loss": 0.8714,
+ "step": 2405
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1677805038387337e-05,
+ "loss": 0.8483,
+ "step": 2406
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1671660974379554e-05,
+ "loss": 0.8766,
+ "step": 2407
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1665516261117914e-05,
+ "loss": 0.8786,
+ "step": 2408
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1659370900988946e-05,
+ "loss": 0.9222,
+ "step": 2409
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.165322489637944e-05,
+ "loss": 0.8537,
+ "step": 2410
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.164707824967644e-05,
+ "loss": 0.3372,
+ "step": 2411
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1640930963267226e-05,
+ "loss": 0.8816,
+ "step": 2412
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1634783039539328e-05,
+ "loss": 0.91,
+ "step": 2413
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.162863448088054e-05,
+ "loss": 0.8229,
+ "step": 2414
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1622485289678886e-05,
+ "loss": 0.8628,
+ "step": 2415
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1616335468322641e-05,
+ "loss": 0.929,
+ "step": 2416
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1610185019200324e-05,
+ "loss": 0.8675,
+ "step": 2417
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1604033944700701e-05,
+ "loss": 0.8585,
+ "step": 2418
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1597882247212776e-05,
+ "loss": 0.8846,
+ "step": 2419
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.15917299291258e-05,
+ "loss": 0.9459,
+ "step": 2420
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1585576992829261e-05,
+ "loss": 0.8492,
+ "step": 2421
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1579423440712887e-05,
+ "loss": 0.8401,
+ "step": 2422
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1573269275166652e-05,
+ "loss": 0.839,
+ "step": 2423
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1567114498580758e-05,
+ "loss": 0.876,
+ "step": 2424
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1560959113345649e-05,
+ "loss": 0.8728,
+ "step": 2425
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1554803121852005e-05,
+ "loss": 0.8891,
+ "step": 2426
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1548646526490749e-05,
+ "loss": 0.8827,
+ "step": 2427
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1542489329653024e-05,
+ "loss": 0.8866,
+ "step": 2428
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.153633153373022e-05,
+ "loss": 0.3399,
+ "step": 2429
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1530173141113947e-05,
+ "loss": 0.9129,
+ "step": 2430
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1524014154196063e-05,
+ "loss": 0.8873,
+ "step": 2431
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1517854575368644e-05,
+ "loss": 0.9095,
+ "step": 2432
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1511694407023994e-05,
+ "loss": 0.845,
+ "step": 2433
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1505533651554654e-05,
+ "loss": 0.9395,
+ "step": 2434
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1499372311353398e-05,
+ "loss": 0.8332,
+ "step": 2435
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.149321038881321e-05,
+ "loss": 0.8609,
+ "step": 2436
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1487047886327314e-05,
+ "loss": 0.8932,
+ "step": 2437
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1480884806289151e-05,
+ "loss": 0.8232,
+ "step": 2438
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1474721151092397e-05,
+ "loss": 0.8671,
+ "step": 2439
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1468556923130943e-05,
+ "loss": 0.3353,
+ "step": 2440
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.14623921247989e-05,
+ "loss": 0.8704,
+ "step": 2441
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1456226758490603e-05,
+ "loss": 0.8445,
+ "step": 2442
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1450060826600618e-05,
+ "loss": 0.814,
+ "step": 2443
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1443894331523718e-05,
+ "loss": 0.8857,
+ "step": 2444
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1437727275654893e-05,
+ "loss": 0.9106,
+ "step": 2445
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1431559661389362e-05,
+ "loss": 0.8441,
+ "step": 2446
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1425391491122557e-05,
+ "loss": 0.3749,
+ "step": 2447
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.141922276725012e-05,
+ "loss": 0.8948,
+ "step": 2448
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1413053492167915e-05,
+ "loss": 0.9103,
+ "step": 2449
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1406883668272015e-05,
+ "loss": 0.8903,
+ "step": 2450
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.140071329795871e-05,
+ "loss": 0.8281,
+ "step": 2451
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.13945423836245e-05,
+ "loss": 0.9291,
+ "step": 2452
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1388370927666102e-05,
+ "loss": 0.8681,
+ "step": 2453
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1382198932480429e-05,
+ "loss": 0.84,
+ "step": 2454
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1376026400464616e-05,
+ "loss": 0.884,
+ "step": 2455
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.136985333401601e-05,
+ "loss": 0.9238,
+ "step": 2456
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1363679735532151e-05,
+ "loss": 0.8118,
+ "step": 2457
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1357505607410797e-05,
+ "loss": 0.8997,
+ "step": 2458
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1351330952049908e-05,
+ "loss": 0.3291,
+ "step": 2459
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1345155771847646e-05,
+ "loss": 0.8934,
+ "step": 2460
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1338980069202388e-05,
+ "loss": 0.839,
+ "step": 2461
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1332803846512697e-05,
+ "loss": 0.8599,
+ "step": 2462
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1326627106177348e-05,
+ "loss": 0.8432,
+ "step": 2463
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.132044985059532e-05,
+ "loss": 0.8947,
+ "step": 2464
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1314272082165785e-05,
+ "loss": 0.8495,
+ "step": 2465
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1308093803288119e-05,
+ "loss": 0.9166,
+ "step": 2466
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.130191501636189e-05,
+ "loss": 0.8832,
+ "step": 2467
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1295735723786872e-05,
+ "loss": 0.8477,
+ "step": 2468
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1289555927963032e-05,
+ "loss": 0.8675,
+ "step": 2469
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1283375631290528e-05,
+ "loss": 0.8931,
+ "step": 2470
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1277194836169714e-05,
+ "loss": 0.8718,
+ "step": 2471
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1271013545001144e-05,
+ "loss": 0.9066,
+ "step": 2472
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1264831760185562e-05,
+ "loss": 0.9353,
+ "step": 2473
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1258649484123895e-05,
+ "loss": 0.83,
+ "step": 2474
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1252466719217274e-05,
+ "loss": 0.8601,
+ "step": 2475
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1246283467867012e-05,
+ "loss": 0.856,
+ "step": 2476
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1240099732474613e-05,
+ "loss": 0.8202,
+ "step": 2477
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1233915515441765e-05,
+ "loss": 0.8848,
+ "step": 2478
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1227730819170349e-05,
+ "loss": 0.8669,
+ "step": 2479
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1221545646062431e-05,
+ "loss": 0.8553,
+ "step": 2480
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.121535999852026e-05,
+ "loss": 0.9064,
+ "step": 2481
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1209173878946271e-05,
+ "loss": 0.8003,
+ "step": 2482
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1202987289743078e-05,
+ "loss": 0.7714,
+ "step": 2483
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1196800233313488e-05,
+ "loss": 0.8098,
+ "step": 2484
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1190612712060475e-05,
+ "loss": 0.9308,
+ "step": 2485
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1184424728387204e-05,
+ "loss": 0.8383,
+ "step": 2486
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1178236284697017e-05,
+ "loss": 0.9422,
+ "step": 2487
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1172047383393434e-05,
+ "loss": 0.9478,
+ "step": 2488
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1165858026880151e-05,
+ "loss": 0.8395,
+ "step": 2489
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1159668217561048e-05,
+ "loss": 0.883,
+ "step": 2490
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.115347795784017e-05,
+ "loss": 0.8678,
+ "step": 2491
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1147287250121745e-05,
+ "loss": 0.8559,
+ "step": 2492
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1141096096810174e-05,
+ "loss": 0.8701,
+ "step": 2493
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1134904500310029e-05,
+ "loss": 0.9352,
+ "step": 2494
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1128712463026048e-05,
+ "loss": 0.9426,
+ "step": 2495
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1122519987363156e-05,
+ "loss": 0.89,
+ "step": 2496
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1116327075726436e-05,
+ "loss": 0.8317,
+ "step": 2497
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1110133730521142e-05,
+ "loss": 0.914,
+ "step": 2498
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.11039399541527e-05,
+ "loss": 0.8242,
+ "step": 2499
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.10977457490267e-05,
+ "loss": 0.8806,
+ "step": 2500
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.10915511175489e-05,
+ "loss": 0.8181,
+ "step": 2501
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1085356062125225e-05,
+ "loss": 0.8941,
+ "step": 2502
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1079160585161759e-05,
+ "loss": 0.7827,
+ "step": 2503
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.107296468906476e-05,
+ "loss": 0.8903,
+ "step": 2504
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.106676837624064e-05,
+ "loss": 0.9281,
+ "step": 2505
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1060571649095972e-05,
+ "loss": 0.3481,
+ "step": 2506
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.10543745100375e-05,
+ "loss": 0.8976,
+ "step": 2507
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1048176961472114e-05,
+ "loss": 0.8353,
+ "step": 2508
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1041979005806876e-05,
+ "loss": 0.9495,
+ "step": 2509
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1035780645449001e-05,
+ "loss": 0.8773,
+ "step": 2510
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1029581882805857e-05,
+ "loss": 0.9464,
+ "step": 2511
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1023382720284973e-05,
+ "loss": 0.9351,
+ "step": 2512
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1017183160294033e-05,
+ "loss": 0.9049,
+ "step": 2513
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1010983205240878e-05,
+ "loss": 0.8924,
+ "step": 2514
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1004782857533488e-05,
+ "loss": 0.9106,
+ "step": 2515
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.099858211958002e-05,
+ "loss": 0.8567,
+ "step": 2516
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0992380993788763e-05,
+ "loss": 0.82,
+ "step": 2517
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0986179482568162e-05,
+ "loss": 0.8759,
+ "step": 2518
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0979977588326815e-05,
+ "loss": 0.9139,
+ "step": 2519
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0973775313473465e-05,
+ "loss": 0.8956,
+ "step": 2520
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0967572660417001e-05,
+ "loss": 0.7797,
+ "step": 2521
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0961369631566468e-05,
+ "loss": 0.8928,
+ "step": 2522
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0955166229331048e-05,
+ "loss": 0.8539,
+ "step": 2523
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0948962456120068e-05,
+ "loss": 0.8613,
+ "step": 2524
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0942758314343007e-05,
+ "loss": 0.8727,
+ "step": 2525
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0936553806409482e-05,
+ "loss": 0.9138,
+ "step": 2526
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0930348934729249e-05,
+ "loss": 0.9008,
+ "step": 2527
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0924143701712211e-05,
+ "loss": 0.9609,
+ "step": 2528
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0917938109768404e-05,
+ "loss": 0.9213,
+ "step": 2529
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0911732161308014e-05,
+ "loss": 0.8708,
+ "step": 2530
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0905525858741364e-05,
+ "loss": 0.9063,
+ "step": 2531
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.08993192044789e-05,
+ "loss": 0.8935,
+ "step": 2532
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.089311220093122e-05,
+ "loss": 0.8919,
+ "step": 2533
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0886904850509052e-05,
+ "loss": 0.9275,
+ "step": 2534
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0880697155623264e-05,
+ "loss": 0.8895,
+ "step": 2535
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0874489118684846e-05,
+ "loss": 0.84,
+ "step": 2536
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.086828074210493e-05,
+ "loss": 0.8531,
+ "step": 2537
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0862072028294777e-05,
+ "loss": 0.8842,
+ "step": 2538
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0855862979665788e-05,
+ "loss": 0.8564,
+ "step": 2539
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0849653598629477e-05,
+ "loss": 0.8437,
+ "step": 2540
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0843443887597495e-05,
+ "loss": 0.9083,
+ "step": 2541
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0837233848981632e-05,
+ "loss": 0.8496,
+ "step": 2542
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0831023485193787e-05,
+ "loss": 0.876,
+ "step": 2543
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0824812798645997e-05,
+ "loss": 0.8915,
+ "step": 2544
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0818601791750418e-05,
+ "loss": 0.8835,
+ "step": 2545
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0812390466919337e-05,
+ "loss": 0.894,
+ "step": 2546
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0806178826565162e-05,
+ "loss": 0.9062,
+ "step": 2547
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0799966873100419e-05,
+ "loss": 0.9062,
+ "step": 2548
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0793754608937758e-05,
+ "loss": 0.843,
+ "step": 2549
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0787542036489955e-05,
+ "loss": 0.8888,
+ "step": 2550
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0781329158169902e-05,
+ "loss": 0.8704,
+ "step": 2551
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0775115976390607e-05,
+ "loss": 0.9119,
+ "step": 2552
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0768902493565197e-05,
+ "loss": 0.7724,
+ "step": 2553
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0762688712106918e-05,
+ "loss": 0.9035,
+ "step": 2554
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0756474634429133e-05,
+ "loss": 0.9005,
+ "step": 2555
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0750260262945314e-05,
+ "loss": 0.8955,
+ "step": 2556
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0744045600069055e-05,
+ "loss": 0.3451,
+ "step": 2557
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0737830648214063e-05,
+ "loss": 0.8785,
+ "step": 2558
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0731615409794144e-05,
+ "loss": 0.8356,
+ "step": 2559
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0725399887223234e-05,
+ "loss": 0.8944,
+ "step": 2560
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0719184082915364e-05,
+ "loss": 0.8373,
+ "step": 2561
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0712967999284682e-05,
+ "loss": 0.8753,
+ "step": 2562
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0706751638745448e-05,
+ "loss": 0.7854,
+ "step": 2563
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0700535003712023e-05,
+ "loss": 0.9136,
+ "step": 2564
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.069431809659887e-05,
+ "loss": 0.8197,
+ "step": 2565
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.068810091982057e-05,
+ "loss": 0.8508,
+ "step": 2566
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0681883475791803e-05,
+ "loss": 0.9265,
+ "step": 2567
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.067566576692735e-05,
+ "loss": 0.8755,
+ "step": 2568
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0669447795642103e-05,
+ "loss": 0.9198,
+ "step": 2569
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.066322956435104e-05,
+ "loss": 0.8855,
+ "step": 2570
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.065701107546926e-05,
+ "loss": 0.8474,
+ "step": 2571
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.065079233141195e-05,
+ "loss": 0.8718,
+ "step": 2572
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0644573334594395e-05,
+ "loss": 0.8611,
+ "step": 2573
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0638354087431986e-05,
+ "loss": 0.8439,
+ "step": 2574
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0632134592340204e-05,
+ "loss": 0.8367,
+ "step": 2575
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0625914851734632e-05,
+ "loss": 0.852,
+ "step": 2576
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0619694868030943e-05,
+ "loss": 0.92,
+ "step": 2577
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0613474643644907e-05,
+ "loss": 0.8508,
+ "step": 2578
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0607254180992391e-05,
+ "loss": 0.8605,
+ "step": 2579
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0601033482489346e-05,
+ "loss": 0.8165,
+ "step": 2580
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0594812550551826e-05,
+ "loss": 0.8475,
+ "step": 2581
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.058859138759596e-05,
+ "loss": 0.8574,
+ "step": 2582
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0582369996037985e-05,
+ "loss": 0.8432,
+ "step": 2583
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0576148378294213e-05,
+ "loss": 0.9228,
+ "step": 2584
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.056992653678105e-05,
+ "loss": 0.8932,
+ "step": 2585
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0563704473914986e-05,
+ "loss": 0.8628,
+ "step": 2586
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0557482192112603e-05,
+ "loss": 0.8452,
+ "step": 2587
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0551259693790556e-05,
+ "loss": 0.8891,
+ "step": 2588
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0545036981365601e-05,
+ "loss": 0.8311,
+ "step": 2589
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.053881405725456e-05,
+ "loss": 0.8099,
+ "step": 2590
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0532590923874349e-05,
+ "loss": 0.8757,
+ "step": 2591
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0526367583641958e-05,
+ "loss": 0.9395,
+ "step": 2592
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0520144038974468e-05,
+ "loss": 0.8493,
+ "step": 2593
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0513920292289021e-05,
+ "loss": 0.9005,
+ "step": 2594
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0507696346002857e-05,
+ "loss": 0.8898,
+ "step": 2595
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0501472202533285e-05,
+ "loss": 0.9562,
+ "step": 2596
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0495247864297684e-05,
+ "loss": 0.7879,
+ "step": 2597
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0489023333713522e-05,
+ "loss": 0.8574,
+ "step": 2598
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0482798613198328e-05,
+ "loss": 0.8309,
+ "step": 2599
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.047657370516972e-05,
+ "loss": 0.8986,
+ "step": 2600
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0470348612045376e-05,
+ "loss": 0.8437,
+ "step": 2601
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0464123336243049e-05,
+ "loss": 0.3316,
+ "step": 2602
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0457897880180566e-05,
+ "loss": 0.9496,
+ "step": 2603
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0451672246275826e-05,
+ "loss": 0.8466,
+ "step": 2604
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0445446436946788e-05,
+ "loss": 0.8402,
+ "step": 2605
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0439220454611486e-05,
+ "loss": 0.8794,
+ "step": 2606
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0432994301688021e-05,
+ "loss": 0.8938,
+ "step": 2607
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0426767980594559e-05,
+ "loss": 0.8828,
+ "step": 2608
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0420541493749332e-05,
+ "loss": 0.9552,
+ "step": 2609
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0414314843570634e-05,
+ "loss": 0.7741,
+ "step": 2610
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0408088032476822e-05,
+ "loss": 0.8506,
+ "step": 2611
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0401861062886324e-05,
+ "loss": 0.8795,
+ "step": 2612
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0395633937217622e-05,
+ "loss": 0.8442,
+ "step": 2613
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0389406657889254e-05,
+ "loss": 0.8694,
+ "step": 2614
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0383179227319826e-05,
+ "loss": 0.3218,
+ "step": 2615
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0376951647928007e-05,
+ "loss": 0.9393,
+ "step": 2616
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0370723922132506e-05,
+ "loss": 0.827,
+ "step": 2617
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.036449605235211e-05,
+ "loss": 0.9167,
+ "step": 2618
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0358268041005644e-05,
+ "loss": 0.9681,
+ "step": 2619
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0352039890511997e-05,
+ "loss": 0.8009,
+ "step": 2620
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.034581160329012e-05,
+ "loss": 0.8019,
+ "step": 2621
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0339583181758997e-05,
+ "loss": 0.8748,
+ "step": 2622
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.033335462833768e-05,
+ "loss": 0.8858,
+ "step": 2623
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0327125945445265e-05,
+ "loss": 0.8629,
+ "step": 2624
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0320897135500904e-05,
+ "loss": 0.8512,
+ "step": 2625
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0314668200923791e-05,
+ "loss": 0.8555,
+ "step": 2626
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0308439144133177e-05,
+ "loss": 0.3383,
+ "step": 2627
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0302209967548354e-05,
+ "loss": 0.833,
+ "step": 2628
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.029598067358866e-05,
+ "loss": 0.8692,
+ "step": 2629
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0289751264673485e-05,
+ "loss": 0.2985,
+ "step": 2630
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0283521743222256e-05,
+ "loss": 0.8183,
+ "step": 2631
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0277292111654447e-05,
+ "loss": 0.9011,
+ "step": 2632
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0271062372389582e-05,
+ "loss": 0.8701,
+ "step": 2633
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0264832527847212e-05,
+ "loss": 0.8217,
+ "step": 2634
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0258602580446941e-05,
+ "loss": 0.8651,
+ "step": 2635
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0252372532608405e-05,
+ "loss": 0.8406,
+ "step": 2636
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.024614238675129e-05,
+ "loss": 0.9297,
+ "step": 2637
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0239912145295303e-05,
+ "loss": 0.8563,
+ "step": 2638
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0233681810660207e-05,
+ "loss": 0.8697,
+ "step": 2639
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0227451385265788e-05,
+ "loss": 0.806,
+ "step": 2640
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.022122087153187e-05,
+ "loss": 0.9589,
+ "step": 2641
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0214990271878319e-05,
+ "loss": 0.8727,
+ "step": 2642
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0208759588725016e-05,
+ "loss": 0.9049,
+ "step": 2643
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0202528824491899e-05,
+ "loss": 0.8293,
+ "step": 2644
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0196297981598921e-05,
+ "loss": 0.8461,
+ "step": 2645
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.019006706246607e-05,
+ "loss": 0.8191,
+ "step": 2646
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.018383606951336e-05,
+ "loss": 0.8551,
+ "step": 2647
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0177605005160837e-05,
+ "loss": 0.8259,
+ "step": 2648
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0171373871828578e-05,
+ "loss": 0.8619,
+ "step": 2649
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0165142671936685e-05,
+ "loss": 0.8181,
+ "step": 2650
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0158911407905279e-05,
+ "loss": 0.8691,
+ "step": 2651
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0152680082154514e-05,
+ "loss": 0.8625,
+ "step": 2652
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0146448697104561e-05,
+ "loss": 0.8801,
+ "step": 2653
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0140217255175626e-05,
+ "loss": 0.8809,
+ "step": 2654
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.013398575878792e-05,
+ "loss": 0.3107,
+ "step": 2655
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0127754210361694e-05,
+ "loss": 0.8273,
+ "step": 2656
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0121522612317204e-05,
+ "loss": 0.912,
+ "step": 2657
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.011529096707473e-05,
+ "loss": 0.8444,
+ "step": 2658
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0109059277054574e-05,
+ "loss": 0.3209,
+ "step": 2659
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.010282754467705e-05,
+ "loss": 0.9094,
+ "step": 2660
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0096595772362492e-05,
+ "loss": 0.8436,
+ "step": 2661
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0090363962531251e-05,
+ "loss": 0.3562,
+ "step": 2662
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0084132117603689e-05,
+ "loss": 0.852,
+ "step": 2663
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0077900240000181e-05,
+ "loss": 0.7994,
+ "step": 2664
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0071668332141115e-05,
+ "loss": 0.8901,
+ "step": 2665
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0065436396446899e-05,
+ "loss": 0.8518,
+ "step": 2666
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0059204435337938e-05,
+ "loss": 0.9159,
+ "step": 2667
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0052972451234656e-05,
+ "loss": 0.8454,
+ "step": 2668
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0046740446557485e-05,
+ "loss": 0.8894,
+ "step": 2669
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0040508423726865e-05,
+ "loss": 0.8484,
+ "step": 2670
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0034276385163238e-05,
+ "loss": 0.8848,
+ "step": 2671
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0028044333287056e-05,
+ "loss": 0.3464,
+ "step": 2672
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.002181227051878e-05,
+ "loss": 0.8401,
+ "step": 2673
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0015580199278873e-05,
+ "loss": 0.8855,
+ "step": 2674
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0009348121987795e-05,
+ "loss": 0.8782,
+ "step": 2675
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.000311604106601e-05,
+ "loss": 0.9418,
+ "step": 2676
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.996883958933993e-06,
+ "loss": 0.8429,
+ "step": 2677
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.99065187801221e-06,
+ "loss": 0.8959,
+ "step": 2678
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.984419800721132e-06,
+ "loss": 0.8356,
+ "step": 2679
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.978187729481218e-06,
+ "loss": 0.8438,
+ "step": 2680
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.971955666712945e-06,
+ "loss": 0.9788,
+ "step": 2681
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.965723614836764e-06,
+ "loss": 0.812,
+ "step": 2682
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.959491576273139e-06,
+ "loss": 0.9521,
+ "step": 2683
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.95325955344252e-06,
+ "loss": 0.8228,
+ "step": 2684
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.947027548765347e-06,
+ "loss": 0.8729,
+ "step": 2685
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.940795564662064e-06,
+ "loss": 0.9111,
+ "step": 2686
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.934563603553103e-06,
+ "loss": 0.847,
+ "step": 2687
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.928331667858886e-06,
+ "loss": 0.8866,
+ "step": 2688
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.922099759999822e-06,
+ "loss": 0.8624,
+ "step": 2689
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.915867882396314e-06,
+ "loss": 0.9263,
+ "step": 2690
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.909636037468754e-06,
+ "loss": 0.8387,
+ "step": 2691
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.90340422763751e-06,
+ "loss": 0.8609,
+ "step": 2692
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.897172455322953e-06,
+ "loss": 0.8881,
+ "step": 2693
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.890940722945429e-06,
+ "loss": 0.8277,
+ "step": 2694
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.884709032925274e-06,
+ "loss": 0.8753,
+ "step": 2695
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.878477387682801e-06,
+ "loss": 0.8395,
+ "step": 2696
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.872245789638308e-06,
+ "loss": 0.8026,
+ "step": 2697
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.866014241212078e-06,
+ "loss": 0.882,
+ "step": 2698
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.859782744824376e-06,
+ "loss": 0.8545,
+ "step": 2699
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.85355130289544e-06,
+ "loss": 0.9182,
+ "step": 2700
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.84731991784549e-06,
+ "loss": 0.8645,
+ "step": 2701
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.841088592094726e-06,
+ "loss": 0.8556,
+ "step": 2702
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.834857328063316e-06,
+ "loss": 0.8338,
+ "step": 2703
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.828626128171422e-06,
+ "loss": 0.8932,
+ "step": 2704
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.822394994839164e-06,
+ "loss": 0.8671,
+ "step": 2705
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.816163930486643e-06,
+ "loss": 0.881,
+ "step": 2706
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.809932937533935e-06,
+ "loss": 0.9008,
+ "step": 2707
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.803702018401084e-06,
+ "loss": 0.8434,
+ "step": 2708
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.797471175508101e-06,
+ "loss": 0.8887,
+ "step": 2709
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.791240411274982e-06,
+ "loss": 0.8327,
+ "step": 2710
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.785009728121686e-06,
+ "loss": 0.8942,
+ "step": 2711
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.778779128468133e-06,
+ "loss": 0.869,
+ "step": 2712
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.772548614734217e-06,
+ "loss": 0.8334,
+ "step": 2713
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.766318189339798e-06,
+ "loss": 0.8665,
+ "step": 2714
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.760087854704697e-06,
+ "loss": 0.8676,
+ "step": 2715
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.753857613248714e-06,
+ "loss": 0.8543,
+ "step": 2716
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.747627467391596e-06,
+ "loss": 0.8593,
+ "step": 2717
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.741397419553062e-06,
+ "loss": 0.8325,
+ "step": 2718
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.735167472152793e-06,
+ "loss": 0.8437,
+ "step": 2719
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.728937627610425e-06,
+ "loss": 0.8573,
+ "step": 2720
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.722707888345553e-06,
+ "loss": 0.9113,
+ "step": 2721
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.716478256777749e-06,
+ "loss": 0.8483,
+ "step": 2722
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.710248735326519e-06,
+ "loss": 0.8847,
+ "step": 2723
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.704019326411344e-06,
+ "loss": 0.8559,
+ "step": 2724
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.697790032451651e-06,
+ "loss": 0.8365,
+ "step": 2725
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.691560855866826e-06,
+ "loss": 0.8783,
+ "step": 2726
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.685331799076208e-06,
+ "loss": 0.9162,
+ "step": 2727
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.6791028644991e-06,
+ "loss": 0.827,
+ "step": 2728
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.672874054554738e-06,
+ "loss": 0.9363,
+ "step": 2729
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.666645371662324e-06,
+ "loss": 0.8583,
+ "step": 2730
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.660416818241007e-06,
+ "loss": 0.8748,
+ "step": 2731
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.654188396709882e-06,
+ "loss": 0.8848,
+ "step": 2732
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.647960109488003e-06,
+ "loss": 0.8939,
+ "step": 2733
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.64173195899436e-06,
+ "loss": 0.8642,
+ "step": 2734
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.635503947647894e-06,
+ "loss": 0.8255,
+ "step": 2735
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.629276077867497e-06,
+ "loss": 0.8527,
+ "step": 2736
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.623048352071998e-06,
+ "loss": 0.8222,
+ "step": 2737
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.616820772680174e-06,
+ "loss": 0.8606,
+ "step": 2738
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.610593342110746e-06,
+ "loss": 0.8496,
+ "step": 2739
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.604366062782381e-06,
+ "loss": 0.8456,
+ "step": 2740
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.598138937113677e-06,
+ "loss": 0.8517,
+ "step": 2741
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.59191196752318e-06,
+ "loss": 0.3751,
+ "step": 2742
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.58568515642937e-06,
+ "loss": 0.845,
+ "step": 2743
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.579458506250668e-06,
+ "loss": 0.9585,
+ "step": 2744
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.573232019405441e-06,
+ "loss": 0.8443,
+ "step": 2745
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.567005698311982e-06,
+ "loss": 0.8783,
+ "step": 2746
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.560779545388517e-06,
+ "loss": 0.8972,
+ "step": 2747
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.554553563053217e-06,
+ "loss": 0.8733,
+ "step": 2748
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.548327753724181e-06,
+ "loss": 0.9073,
+ "step": 2749
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.542102119819436e-06,
+ "loss": 0.8283,
+ "step": 2750
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.535876663756955e-06,
+ "loss": 0.8966,
+ "step": 2751
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.529651387954628e-06,
+ "loss": 0.8656,
+ "step": 2752
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.523426294830284e-06,
+ "loss": 0.817,
+ "step": 2753
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.517201386801675e-06,
+ "loss": 0.9141,
+ "step": 2754
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.510976666286484e-06,
+ "loss": 0.908,
+ "step": 2755
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.504752135702318e-06,
+ "loss": 0.9099,
+ "step": 2756
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.498527797466718e-06,
+ "loss": 0.8117,
+ "step": 2757
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.492303653997146e-06,
+ "loss": 0.8525,
+ "step": 2758
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.48607970771098e-06,
+ "loss": 0.8752,
+ "step": 2759
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.479855961025538e-06,
+ "loss": 0.8283,
+ "step": 2760
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.473632416358045e-06,
+ "loss": 0.3365,
+ "step": 2761
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.467409076125653e-06,
+ "loss": 0.8983,
+ "step": 2762
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.461185942745443e-06,
+ "loss": 0.9038,
+ "step": 2763
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.454963018634402e-06,
+ "loss": 0.8155,
+ "step": 2764
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.448740306209447e-06,
+ "loss": 0.86,
+ "step": 2765
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.442517807887402e-06,
+ "loss": 0.8279,
+ "step": 2766
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.436295526085016e-06,
+ "loss": 0.9015,
+ "step": 2767
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.430073463218952e-06,
+ "loss": 0.8779,
+ "step": 2768
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.423851621705789e-06,
+ "loss": 0.9048,
+ "step": 2769
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.41763000396202e-06,
+ "loss": 0.8563,
+ "step": 2770
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.411408612404043e-06,
+ "loss": 0.8487,
+ "step": 2771
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.40518744944818e-06,
+ "loss": 0.8595,
+ "step": 2772
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.398966517510654e-06,
+ "loss": 0.9181,
+ "step": 2773
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.39274581900761e-06,
+ "loss": 0.9262,
+ "step": 2774
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.386525356355095e-06,
+ "loss": 0.3415,
+ "step": 2775
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.380305131969059e-06,
+ "loss": 0.8506,
+ "step": 2776
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.374085148265372e-06,
+ "loss": 0.833,
+ "step": 2777
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.3678654076598e-06,
+ "loss": 0.8932,
+ "step": 2778
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.361645912568015e-06,
+ "loss": 0.8792,
+ "step": 2779
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.355426665405607e-06,
+ "loss": 0.934,
+ "step": 2780
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.349207668588053e-06,
+ "loss": 0.851,
+ "step": 2781
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.342988924530742e-06,
+ "loss": 0.8563,
+ "step": 2782
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.336770435648963e-06,
+ "loss": 0.8484,
+ "step": 2783
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.330552204357904e-06,
+ "loss": 0.8848,
+ "step": 2784
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.32433423307265e-06,
+ "loss": 0.872,
+ "step": 2785
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.318116524208198e-06,
+ "loss": 0.8068,
+ "step": 2786
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.311899080179433e-06,
+ "loss": 0.8732,
+ "step": 2787
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.305681903401133e-06,
+ "loss": 0.902,
+ "step": 2788
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.299464996287984e-06,
+ "loss": 0.8916,
+ "step": 2789
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.293248361254557e-06,
+ "loss": 0.8251,
+ "step": 2790
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.287032000715318e-06,
+ "loss": 0.8698,
+ "step": 2791
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.28081591708464e-06,
+ "loss": 0.8647,
+ "step": 2792
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.27460011277677e-06,
+ "loss": 0.8747,
+ "step": 2793
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.268384590205858e-06,
+ "loss": 0.8719,
+ "step": 2794
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.262169351785944e-06,
+ "loss": 0.8338,
+ "step": 2795
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.255954399930948e-06,
+ "loss": 0.8776,
+ "step": 2796
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.249739737054686e-06,
+ "loss": 0.8729,
+ "step": 2797
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.24352536557087e-06,
+ "loss": 0.8764,
+ "step": 2798
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.237311287893086e-06,
+ "loss": 0.813,
+ "step": 2799
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.231097506434808e-06,
+ "loss": 0.8445,
+ "step": 2800
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.224884023609398e-06,
+ "loss": 0.8937,
+ "step": 2801
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.218670841830098e-06,
+ "loss": 0.8662,
+ "step": 2802
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.212457963510045e-06,
+ "loss": 0.8182,
+ "step": 2803
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.206245391062243e-06,
+ "loss": 0.8764,
+ "step": 2804
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.200033126899585e-06,
+ "loss": 0.8467,
+ "step": 2805
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.193821173434843e-06,
+ "loss": 0.331,
+ "step": 2806
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.187609533080668e-06,
+ "loss": 0.828,
+ "step": 2807
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.181398208249583e-06,
+ "loss": 0.8566,
+ "step": 2808
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.175187201354005e-06,
+ "loss": 0.9304,
+ "step": 2809
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.168976514806216e-06,
+ "loss": 0.8846,
+ "step": 2810
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.162766151018372e-06,
+ "loss": 0.8465,
+ "step": 2811
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.156556112402508e-06,
+ "loss": 0.8302,
+ "step": 2812
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.150346401370528e-06,
+ "loss": 0.8129,
+ "step": 2813
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.144137020334214e-06,
+ "loss": 0.8516,
+ "step": 2814
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.137927971705223e-06,
+ "loss": 0.9398,
+ "step": 2815
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.131719257895074e-06,
+ "loss": 0.8873,
+ "step": 2816
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.125510881315159e-06,
+ "loss": 0.8755,
+ "step": 2817
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.119302844376741e-06,
+ "loss": 0.8555,
+ "step": 2818
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.113095149490951e-06,
+ "loss": 0.8529,
+ "step": 2819
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.106887799068782e-06,
+ "loss": 0.9058,
+ "step": 2820
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.100680795521104e-06,
+ "loss": 0.9051,
+ "step": 2821
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.09447414125864e-06,
+ "loss": 0.9627,
+ "step": 2822
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.088267838691987e-06,
+ "loss": 0.9022,
+ "step": 2823
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.0820618902316e-06,
+ "loss": 0.9208,
+ "step": 2824
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.075856298287796e-06,
+ "loss": 0.3172,
+ "step": 2825
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.069651065270753e-06,
+ "loss": 0.8085,
+ "step": 2826
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.06344619359052e-06,
+ "loss": 0.7775,
+ "step": 2827
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.057241685656995e-06,
+ "loss": 0.8915,
+ "step": 2828
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.051037543879933e-06,
+ "loss": 0.782,
+ "step": 2829
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.044833770668957e-06,
+ "loss": 0.8743,
+ "step": 2830
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.038630368433537e-06,
+ "loss": 0.8376,
+ "step": 2831
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.032427339583e-06,
+ "loss": 0.8785,
+ "step": 2832
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.026224686526539e-06,
+ "loss": 0.9186,
+ "step": 2833
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.020022411673186e-06,
+ "loss": 0.8367,
+ "step": 2834
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.013820517431841e-06,
+ "loss": 0.8397,
+ "step": 2835
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.00761900621124e-06,
+ "loss": 0.88,
+ "step": 2836
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.00141788041998e-06,
+ "loss": 0.8992,
+ "step": 2837
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.99521714246651e-06,
+ "loss": 0.8569,
+ "step": 2838
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.989016794759127e-06,
+ "loss": 0.8422,
+ "step": 2839
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.98281683970597e-06,
+ "loss": 0.8888,
+ "step": 2840
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.97661727971503e-06,
+ "loss": 0.935,
+ "step": 2841
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.970418117194146e-06,
+ "loss": 0.9008,
+ "step": 2842
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.964219354550999e-06,
+ "loss": 0.8679,
+ "step": 2843
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.958020994193124e-06,
+ "loss": 0.8332,
+ "step": 2844
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.951823038527887e-06,
+ "loss": 0.871,
+ "step": 2845
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.945625489962503e-06,
+ "loss": 0.8973,
+ "step": 2846
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.93942835090403e-06,
+ "loss": 0.8367,
+ "step": 2847
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.933231623759365e-06,
+ "loss": 0.8643,
+ "step": 2848
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.927035310935241e-06,
+ "loss": 0.8812,
+ "step": 2849
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.920839414838243e-06,
+ "loss": 0.8782,
+ "step": 2850
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.914643937874778e-06,
+ "loss": 0.866,
+ "step": 2851
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.908448882451104e-06,
+ "loss": 0.8704,
+ "step": 2852
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.902254250973306e-06,
+ "loss": 0.8716,
+ "step": 2853
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.896060045847305e-06,
+ "loss": 0.8317,
+ "step": 2854
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.88986626947886e-06,
+ "loss": 0.8246,
+ "step": 2855
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.883672924273566e-06,
+ "loss": 0.8153,
+ "step": 2856
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.877480012636847e-06,
+ "loss": 0.9074,
+ "step": 2857
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.871287536973954e-06,
+ "loss": 0.8408,
+ "step": 2858
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.865095499689978e-06,
+ "loss": 0.3436,
+ "step": 2859
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.85890390318983e-06,
+ "loss": 0.9315,
+ "step": 2860
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.852712749878255e-06,
+ "loss": 0.8388,
+ "step": 2861
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.846522042159833e-06,
+ "loss": 0.8739,
+ "step": 2862
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.840331782438954e-06,
+ "loss": 0.8447,
+ "step": 2863
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.83414197311985e-06,
+ "loss": 0.8676,
+ "step": 2864
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.82795261660657e-06,
+ "loss": 0.8118,
+ "step": 2865
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.821763715302986e-06,
+ "loss": 0.8921,
+ "step": 2866
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.815575271612798e-06,
+ "loss": 0.8897,
+ "step": 2867
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.809387287939528e-06,
+ "loss": 0.8711,
+ "step": 2868
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.803199766686517e-06,
+ "loss": 0.8597,
+ "step": 2869
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.797012710256923e-06,
+ "loss": 0.8741,
+ "step": 2870
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.790826121053732e-06,
+ "loss": 0.8237,
+ "step": 2871
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.784640001479741e-06,
+ "loss": 0.8833,
+ "step": 2872
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.77845435393757e-06,
+ "loss": 0.8616,
+ "step": 2873
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.772269180829653e-06,
+ "loss": 0.8781,
+ "step": 2874
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.766084484558237e-06,
+ "loss": 0.9086,
+ "step": 2875
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.759900267525393e-06,
+ "loss": 0.8667,
+ "step": 2876
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.753716532132992e-06,
+ "loss": 0.8897,
+ "step": 2877
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.747533280782725e-06,
+ "loss": 0.8443,
+ "step": 2878
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.741350515876103e-06,
+ "loss": 0.8672,
+ "step": 2879
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.73516823981444e-06,
+ "loss": 0.8794,
+ "step": 2880
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.728986454998858e-06,
+ "loss": 0.8522,
+ "step": 2881
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.72280516383029e-06,
+ "loss": 0.8251,
+ "step": 2882
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.716624368709477e-06,
+ "loss": 0.8877,
+ "step": 2883
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.71044407203697e-06,
+ "loss": 0.8925,
+ "step": 2884
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.70426427621313e-06,
+ "loss": 0.8259,
+ "step": 2885
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.698084983638111e-06,
+ "loss": 0.8513,
+ "step": 2886
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.691906196711884e-06,
+ "loss": 0.9092,
+ "step": 2887
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.685727917834218e-06,
+ "loss": 0.8844,
+ "step": 2888
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.679550149404685e-06,
+ "loss": 0.88,
+ "step": 2889
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.673372893822653e-06,
+ "loss": 0.856,
+ "step": 2890
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.667196153487308e-06,
+ "loss": 0.8433,
+ "step": 2891
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.661019930797615e-06,
+ "loss": 0.8484,
+ "step": 2892
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.654844228152355e-06,
+ "loss": 0.9087,
+ "step": 2893
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.648669047950097e-06,
+ "loss": 0.795,
+ "step": 2894
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.642494392589206e-06,
+ "loss": 0.8777,
+ "step": 2895
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.63632026446785e-06,
+ "loss": 0.8304,
+ "step": 2896
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.630146665983993e-06,
+ "loss": 0.8739,
+ "step": 2897
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.623973599535385e-06,
+ "loss": 0.8136,
+ "step": 2898
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.617801067519575e-06,
+ "loss": 0.8462,
+ "step": 2899
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.611629072333905e-06,
+ "loss": 0.8709,
+ "step": 2900
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.605457616375503e-06,
+ "loss": 0.8373,
+ "step": 2901
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.599286702041292e-06,
+ "loss": 0.8282,
+ "step": 2902
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.593116331727987e-06,
+ "loss": 0.8921,
+ "step": 2903
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.586946507832088e-06,
+ "loss": 0.3204,
+ "step": 2904
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.580777232749883e-06,
+ "loss": 0.8323,
+ "step": 2905
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.574608508877448e-06,
+ "loss": 0.8535,
+ "step": 2906
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.568440338610638e-06,
+ "loss": 0.8983,
+ "step": 2907
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.562272724345108e-06,
+ "loss": 0.891,
+ "step": 2908
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.556105668476287e-06,
+ "loss": 0.9547,
+ "step": 2909
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.549939173399385e-06,
+ "loss": 0.8641,
+ "step": 2910
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.5437732415094e-06,
+ "loss": 0.9178,
+ "step": 2911
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.537607875201106e-06,
+ "loss": 0.9223,
+ "step": 2912
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.531443076869058e-06,
+ "loss": 0.886,
+ "step": 2913
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.525278848907603e-06,
+ "loss": 0.8435,
+ "step": 2914
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.51911519371085e-06,
+ "loss": 0.8763,
+ "step": 2915
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.512952113672689e-06,
+ "loss": 0.8662,
+ "step": 2916
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.506789611186794e-06,
+ "loss": 0.8553,
+ "step": 2917
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.500627688646607e-06,
+ "loss": 0.8467,
+ "step": 2918
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.494466348445345e-06,
+ "loss": 0.8956,
+ "step": 2919
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.48830559297601e-06,
+ "loss": 0.9071,
+ "step": 2920
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.48214542463136e-06,
+ "loss": 0.8047,
+ "step": 2921
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.475985845803938e-06,
+ "loss": 0.8647,
+ "step": 2922
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.469826858886054e-06,
+ "loss": 0.8714,
+ "step": 2923
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.463668466269785e-06,
+ "loss": 0.9193,
+ "step": 2924
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.457510670346976e-06,
+ "loss": 0.8771,
+ "step": 2925
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.451353473509254e-06,
+ "loss": 0.8763,
+ "step": 2926
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.445196878147997e-06,
+ "loss": 0.8846,
+ "step": 2927
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.439040886654354e-06,
+ "loss": 0.8258,
+ "step": 2928
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.432885501419248e-06,
+ "loss": 0.8984,
+ "step": 2929
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.426730724833354e-06,
+ "loss": 0.848,
+ "step": 2930
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.420576559287112e-06,
+ "loss": 0.8857,
+ "step": 2931
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.414423007170742e-06,
+ "loss": 0.8956,
+ "step": 2932
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.408270070874201e-06,
+ "loss": 0.8615,
+ "step": 2933
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.402117752787225e-06,
+ "loss": 0.8421,
+ "step": 2934
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.395966055299302e-06,
+ "loss": 0.8771,
+ "step": 2935
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.389814980799679e-06,
+ "loss": 0.905,
+ "step": 2936
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.38366453167736e-06,
+ "loss": 0.8578,
+ "step": 2937
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.377514710321117e-06,
+ "loss": 0.8943,
+ "step": 2938
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.371365519119463e-06,
+ "loss": 0.9173,
+ "step": 2939
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.365216960460675e-06,
+ "loss": 0.8391,
+ "step": 2940
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.359069036732781e-06,
+ "loss": 0.8904,
+ "step": 2941
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.352921750323562e-06,
+ "loss": 0.9559,
+ "step": 2942
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.346775103620559e-06,
+ "loss": 0.7832,
+ "step": 2943
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.340629099011057e-06,
+ "loss": 0.8711,
+ "step": 2944
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.33448373888209e-06,
+ "loss": 0.33,
+ "step": 2945
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.328339025620449e-06,
+ "loss": 0.9021,
+ "step": 2946
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.322194961612668e-06,
+ "loss": 0.8918,
+ "step": 2947
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.316051549245026e-06,
+ "loss": 0.898,
+ "step": 2948
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.309908790903562e-06,
+ "loss": 0.8721,
+ "step": 2949
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.303766688974047e-06,
+ "loss": 0.922,
+ "step": 2950
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.297625245842006e-06,
+ "loss": 0.331,
+ "step": 2951
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.291484463892703e-06,
+ "loss": 0.8646,
+ "step": 2952
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.285344345511147e-06,
+ "loss": 0.8507,
+ "step": 2953
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.279204893082083e-06,
+ "loss": 0.898,
+ "step": 2954
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.273066108990017e-06,
+ "loss": 0.8155,
+ "step": 2955
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.266927995619175e-06,
+ "loss": 0.8892,
+ "step": 2956
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.260790555353526e-06,
+ "loss": 0.8829,
+ "step": 2957
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.254653790576787e-06,
+ "loss": 0.8767,
+ "step": 2958
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.248517703672405e-06,
+ "loss": 0.8603,
+ "step": 2959
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.242382297023558e-06,
+ "loss": 0.8926,
+ "step": 2960
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.23624757301318e-06,
+ "loss": 0.8079,
+ "step": 2961
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.230113534023917e-06,
+ "loss": 0.8841,
+ "step": 2962
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.223980182438167e-06,
+ "loss": 0.8636,
+ "step": 2963
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.217847520638049e-06,
+ "loss": 0.8365,
+ "step": 2964
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.211715551005414e-06,
+ "loss": 0.8443,
+ "step": 2965
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.205584275921854e-06,
+ "loss": 0.7492,
+ "step": 2966
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.199453697768686e-06,
+ "loss": 0.7804,
+ "step": 2967
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.193323818926955e-06,
+ "loss": 0.8401,
+ "step": 2968
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.187194641777432e-06,
+ "loss": 0.8763,
+ "step": 2969
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.181066168700622e-06,
+ "loss": 0.8813,
+ "step": 2970
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.174938402076754e-06,
+ "loss": 0.8827,
+ "step": 2971
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.168811344285776e-06,
+ "loss": 0.8392,
+ "step": 2972
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.162684997707374e-06,
+ "loss": 0.83,
+ "step": 2973
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.156559364720947e-06,
+ "loss": 0.831,
+ "step": 2974
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.150434447705623e-06,
+ "loss": 0.8507,
+ "step": 2975
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.144310249040246e-06,
+ "loss": 0.8886,
+ "step": 2976
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.138186771103382e-06,
+ "loss": 0.842,
+ "step": 2977
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.132064016273325e-06,
+ "loss": 0.8096,
+ "step": 2978
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.12594198692808e-06,
+ "loss": 0.8106,
+ "step": 2979
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.119820685445373e-06,
+ "loss": 0.8398,
+ "step": 2980
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.113700114202647e-06,
+ "loss": 0.8498,
+ "step": 2981
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.107580275577059e-06,
+ "loss": 0.8434,
+ "step": 2982
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.101461171945483e-06,
+ "loss": 0.8361,
+ "step": 2983
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.095342805684516e-06,
+ "loss": 0.8251,
+ "step": 2984
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.089225179170454e-06,
+ "loss": 0.834,
+ "step": 2985
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.083108294779313e-06,
+ "loss": 0.8794,
+ "step": 2986
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.076992154886826e-06,
+ "loss": 0.8278,
+ "step": 2987
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.070876761868426e-06,
+ "loss": 0.3506,
+ "step": 2988
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.064762118099258e-06,
+ "loss": 0.8355,
+ "step": 2989
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.058648225954188e-06,
+ "loss": 0.8349,
+ "step": 2990
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.052535087807774e-06,
+ "loss": 0.8381,
+ "step": 2991
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.046422706034294e-06,
+ "loss": 0.8499,
+ "step": 2992
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.040311083007725e-06,
+ "loss": 0.8846,
+ "step": 2993
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.034200221101746e-06,
+ "loss": 0.8161,
+ "step": 2994
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.028090122689747e-06,
+ "loss": 0.8195,
+ "step": 2995
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.021980790144828e-06,
+ "loss": 0.9054,
+ "step": 2996
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.015872225839776e-06,
+ "loss": 0.8296,
+ "step": 2997
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.009764432147086e-06,
+ "loss": 0.8689,
+ "step": 2998
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.003657411438961e-06,
+ "loss": 0.8763,
+ "step": 2999
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.997551166087293e-06,
+ "loss": 0.861,
+ "step": 3000
+ }
+ ],
+ "logging_steps": 1.0,
+ "max_steps": 5197,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 500,
+ "total_flos": 8242474352115712.0,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/training_args.bin b/llava-v1.5-7b-concat-16/checkpoint-3000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b5632db25e85f4a6440989c9cf4e5eb45e67ccd6
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7e1397f63ab71c83d4546fd5cc220108e4e3680c17b2f7501e2a09ab729de344
+size 6712
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3000/zero_to_fp32.py b/llava-v1.5-7b-concat-16/checkpoint-3000/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..c98caae31534368be22b67fc4ae906836c992a8d
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3000/zero_to_fp32.py
@@ -0,0 +1,587 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+ # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+ # param, re-consolidating each param, while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+ - ``model`: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir, args.output_file, tag=args.tag)
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/config.json b/llava-v1.5-7b-concat-16/checkpoint-3500/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..5b87d31c757ff1906899f1e3a1d047752a0c5005
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/config.json
@@ -0,0 +1,44 @@
+{
+ "_name_or_path": "lmsys/vicuna-7b-v1.5",
+ "architectures": [
+ "LlavaLlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 4096,
+ "mm_hidden_size": 1536,
+ "mm_patch_merge_type": "flat",
+ "mm_projector_lr": null,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "Leonardo6/clip-12m-16-roberta4",
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.37.2",
+ "tune_mm_mlp_adapter": false,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vocab_size": 32000
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/generation_config.json b/llava-v1.5-7b-concat-16/checkpoint-3500/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f686e33d0dd24a8bc304bf932f5bc12717579f0b
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/generation_config.json
@@ -0,0 +1,11 @@
+{
+ "attn_implementation": "flash_attention_2",
+ "bos_token_id": 1,
+ "do_sample": true,
+ "eos_token_id": 2,
+ "max_length": 4096,
+ "pad_token_id": 0,
+ "temperature": 0.9,
+ "top_p": 0.6,
+ "transformers_version": "4.37.2"
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/latest b/llava-v1.5-7b-concat-16/checkpoint-3500/latest
new file mode 100644
index 0000000000000000000000000000000000000000..f3af93198d9ca215e3991f85bf3781ae5dfe71f3
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/latest
@@ -0,0 +1 @@
+global_step3500
\ No newline at end of file
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/model.safetensors.index.json b/llava-v1.5-7b-concat-16/checkpoint-3500/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..51cd1fe67b08db18738439b039f9eec8e67fa02f
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/model.safetensors.index.json
@@ -0,0 +1,701 @@
+{
+ "metadata": {
+ "total_size": 13867362304
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00003-of-00003.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.mm_projector.0.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.0.weight": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.weight": "model-00003-of-00003.safetensors",
+ "model.norm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.embeddings.class_embedding": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.embeddings.patch_embedding.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.embeddings.position_embedding.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.post_layernorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.post_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.pre_layrnorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower1.vision_model.pre_layrnorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.cls_token": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.patch_embeddings.projection.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.patch_embeddings.projection.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.embeddings.position_embeddings": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.0.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.1.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.10.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.11.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.2.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.3.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.4.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.5.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.6.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.7.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.8.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.key.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.key.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.query.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.query.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.value.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.attention.value.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.attention.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.intermediate.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.intermediate.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_after.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_after.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_before.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.layernorm_before.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.output.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.encoder.layer.9.output.dense.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.layernorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.pooler.dense.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower2.pooler.dense.weight": "model-00003-of-00003.safetensors"
+ }
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/rng_state_0.pth b/llava-v1.5-7b-concat-16/checkpoint-3500/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..44bb770e4c85b7b758a6b2962384781d026daabd
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:af2966def51ea1ab87d97a757bd22e7f72001f21baee1a67abfc367e92e2e402
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/rng_state_1.pth b/llava-v1.5-7b-concat-16/checkpoint-3500/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..75cd02b1fceb1b3b1aae40cf4857ce2cea6fd436
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8cf5883933ffd2749908af2fffabf58c748ecc9afbc507bfa1868172477bbf0c
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/rng_state_2.pth b/llava-v1.5-7b-concat-16/checkpoint-3500/rng_state_2.pth
new file mode 100644
index 0000000000000000000000000000000000000000..127a0ab4fb3652fab0edcb4ecc63af17870be47c
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/rng_state_2.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b4f7265b828abac3132886540e39210cab53edc42ddf0389bd517ccd5c1ca42d
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/rng_state_3.pth b/llava-v1.5-7b-concat-16/checkpoint-3500/rng_state_3.pth
new file mode 100644
index 0000000000000000000000000000000000000000..a696a927c56c2b5ca8cb6f3d71f9ca36a1ae9fea
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/rng_state_3.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1ebdec8d90b17c1d6090b2bc79535cba013a72aa00b297c128236362564f916
+size 15024
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/scheduler.pt b/llava-v1.5-7b-concat-16/checkpoint-3500/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..b6aa061b31194cb51c214a5e9a0524a6fae4c5bb
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1135365f9d6ba7d09a970bf1df154fd2cdb01e4a902d69310e73c795e9a468b7
+size 1064
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/special_tokens_map.json b/llava-v1.5-7b-concat-16/checkpoint-3500/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..14761dcf1466dc232bd41de9c21d4c617b15755e
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/special_tokens_map.json
@@ -0,0 +1,24 @@
+{
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "",
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/tokenizer.model b/llava-v1.5-7b-concat-16/checkpoint-3500/tokenizer.model
new file mode 100644
index 0000000000000000000000000000000000000000..6c00c742ce03c627d6cd5b795984876fa49fa899
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/tokenizer.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/tokenizer_config.json b/llava-v1.5-7b-concat-16/checkpoint-3500/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..2d53c0f8edb049fa98763ee75652fafa68bf7f42
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/tokenizer_config.json
@@ -0,0 +1,42 @@
+{
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "legacy": false,
+ "model_max_length": 2048,
+ "pad_token": "",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "",
+ "use_default_system_prompt": false
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/trainer_state.json b/llava-v1.5-7b-concat-16/checkpoint-3500/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..17fd0a8a29bef13a4178e1156dc7d3737c107aa3
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/trainer_state.json
@@ -0,0 +1,21021 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 0.6733682843538069,
+ "eval_steps": 500,
+ "global_step": 3500,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.282051282051282e-07,
+ "loss": 1.437,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.564102564102564e-07,
+ "loss": 1.4396,
+ "step": 2
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.846153846153847e-07,
+ "loss": 1.4201,
+ "step": 3
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 5.128205128205128e-07,
+ "loss": 1.4526,
+ "step": 4
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 6.41025641025641e-07,
+ "loss": 1.4033,
+ "step": 5
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 7.692307692307694e-07,
+ "loss": 1.4341,
+ "step": 6
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 8.974358974358975e-07,
+ "loss": 1.455,
+ "step": 7
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.0256410256410257e-06,
+ "loss": 1.4195,
+ "step": 8
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.153846153846154e-06,
+ "loss": 1.4367,
+ "step": 9
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.282051282051282e-06,
+ "loss": 1.3549,
+ "step": 10
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.4102564102564104e-06,
+ "loss": 1.3929,
+ "step": 11
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.5384615384615387e-06,
+ "loss": 1.3577,
+ "step": 12
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.6666666666666667e-06,
+ "loss": 1.3198,
+ "step": 13
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.794871794871795e-06,
+ "loss": 1.242,
+ "step": 14
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.9230769230769234e-06,
+ "loss": 1.2693,
+ "step": 15
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.0512820512820513e-06,
+ "loss": 1.3043,
+ "step": 16
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.1794871794871797e-06,
+ "loss": 1.2034,
+ "step": 17
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.307692307692308e-06,
+ "loss": 1.1896,
+ "step": 18
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.435897435897436e-06,
+ "loss": 1.2483,
+ "step": 19
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.564102564102564e-06,
+ "loss": 1.1324,
+ "step": 20
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.6923076923076923e-06,
+ "loss": 1.2191,
+ "step": 21
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.8205128205128207e-06,
+ "loss": 1.1962,
+ "step": 22
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.948717948717949e-06,
+ "loss": 1.125,
+ "step": 23
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.0769230769230774e-06,
+ "loss": 1.2311,
+ "step": 24
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.205128205128206e-06,
+ "loss": 1.1687,
+ "step": 25
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.3333333333333333e-06,
+ "loss": 1.1214,
+ "step": 26
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.4615384615384617e-06,
+ "loss": 1.1449,
+ "step": 27
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.58974358974359e-06,
+ "loss": 1.139,
+ "step": 28
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.7179487179487184e-06,
+ "loss": 1.0864,
+ "step": 29
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.846153846153847e-06,
+ "loss": 1.1032,
+ "step": 30
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.974358974358974e-06,
+ "loss": 1.1475,
+ "step": 31
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.102564102564103e-06,
+ "loss": 1.0742,
+ "step": 32
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.230769230769231e-06,
+ "loss": 1.1101,
+ "step": 33
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.358974358974359e-06,
+ "loss": 1.0727,
+ "step": 34
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.487179487179488e-06,
+ "loss": 1.0478,
+ "step": 35
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.615384615384616e-06,
+ "loss": 1.099,
+ "step": 36
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.743589743589744e-06,
+ "loss": 0.3001,
+ "step": 37
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.871794871794872e-06,
+ "loss": 1.095,
+ "step": 38
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5e-06,
+ "loss": 1.0828,
+ "step": 39
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.128205128205128e-06,
+ "loss": 1.0715,
+ "step": 40
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.256410256410257e-06,
+ "loss": 1.0794,
+ "step": 41
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.384615384615385e-06,
+ "loss": 1.1222,
+ "step": 42
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.512820512820514e-06,
+ "loss": 1.0315,
+ "step": 43
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.641025641025641e-06,
+ "loss": 1.0473,
+ "step": 44
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.769230769230769e-06,
+ "loss": 1.1067,
+ "step": 45
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.897435897435898e-06,
+ "loss": 1.0335,
+ "step": 46
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.025641025641026e-06,
+ "loss": 1.0009,
+ "step": 47
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.153846153846155e-06,
+ "loss": 1.0086,
+ "step": 48
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.282051282051282e-06,
+ "loss": 1.0027,
+ "step": 49
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.410256410256412e-06,
+ "loss": 1.0066,
+ "step": 50
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.538461538461539e-06,
+ "loss": 1.0375,
+ "step": 51
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.666666666666667e-06,
+ "loss": 1.0491,
+ "step": 52
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.794871794871796e-06,
+ "loss": 1.0522,
+ "step": 53
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.923076923076923e-06,
+ "loss": 0.9977,
+ "step": 54
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.051282051282053e-06,
+ "loss": 1.0516,
+ "step": 55
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.17948717948718e-06,
+ "loss": 0.3065,
+ "step": 56
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.307692307692308e-06,
+ "loss": 1.057,
+ "step": 57
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.435897435897437e-06,
+ "loss": 1.0589,
+ "step": 58
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.564102564102564e-06,
+ "loss": 1.0796,
+ "step": 59
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.692307692307694e-06,
+ "loss": 1.0433,
+ "step": 60
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.820512820512822e-06,
+ "loss": 0.9848,
+ "step": 61
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.948717948717949e-06,
+ "loss": 1.0166,
+ "step": 62
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.076923076923077e-06,
+ "loss": 0.9902,
+ "step": 63
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.205128205128205e-06,
+ "loss": 1.0357,
+ "step": 64
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.333333333333334e-06,
+ "loss": 0.9981,
+ "step": 65
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.461538461538462e-06,
+ "loss": 0.9887,
+ "step": 66
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.58974358974359e-06,
+ "loss": 0.9445,
+ "step": 67
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.717948717948719e-06,
+ "loss": 1.0034,
+ "step": 68
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.846153846153847e-06,
+ "loss": 0.9865,
+ "step": 69
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.974358974358976e-06,
+ "loss": 1.0095,
+ "step": 70
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.102564102564104e-06,
+ "loss": 0.988,
+ "step": 71
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.230769230769232e-06,
+ "loss": 0.9673,
+ "step": 72
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.358974358974359e-06,
+ "loss": 1.0383,
+ "step": 73
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.487179487179487e-06,
+ "loss": 0.9842,
+ "step": 74
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.615384615384616e-06,
+ "loss": 0.9988,
+ "step": 75
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.743589743589744e-06,
+ "loss": 0.9715,
+ "step": 76
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.871794871794872e-06,
+ "loss": 0.9306,
+ "step": 77
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1e-05,
+ "loss": 1.0179,
+ "step": 78
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.012820512820513e-05,
+ "loss": 1.0813,
+ "step": 79
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0256410256410256e-05,
+ "loss": 0.9825,
+ "step": 80
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0384615384615386e-05,
+ "loss": 1.0437,
+ "step": 81
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0512820512820514e-05,
+ "loss": 1.0863,
+ "step": 82
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0641025641025643e-05,
+ "loss": 1.0367,
+ "step": 83
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.076923076923077e-05,
+ "loss": 0.9808,
+ "step": 84
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0897435897435898e-05,
+ "loss": 0.9815,
+ "step": 85
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1025641025641028e-05,
+ "loss": 1.0001,
+ "step": 86
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1153846153846154e-05,
+ "loss": 1.0443,
+ "step": 87
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1282051282051283e-05,
+ "loss": 1.0108,
+ "step": 88
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1410256410256411e-05,
+ "loss": 0.2945,
+ "step": 89
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1538461538461538e-05,
+ "loss": 0.9728,
+ "step": 90
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1666666666666668e-05,
+ "loss": 1.0072,
+ "step": 91
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1794871794871796e-05,
+ "loss": 1.0504,
+ "step": 92
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1923076923076925e-05,
+ "loss": 1.0167,
+ "step": 93
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2051282051282051e-05,
+ "loss": 0.9801,
+ "step": 94
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.217948717948718e-05,
+ "loss": 0.307,
+ "step": 95
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.230769230769231e-05,
+ "loss": 0.9832,
+ "step": 96
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2435897435897436e-05,
+ "loss": 1.0245,
+ "step": 97
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2564102564102565e-05,
+ "loss": 1.016,
+ "step": 98
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2692307692307693e-05,
+ "loss": 0.9245,
+ "step": 99
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2820512820512823e-05,
+ "loss": 0.2833,
+ "step": 100
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.294871794871795e-05,
+ "loss": 0.9694,
+ "step": 101
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3076923076923078e-05,
+ "loss": 1.0847,
+ "step": 102
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3205128205128207e-05,
+ "loss": 0.9805,
+ "step": 103
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3333333333333333e-05,
+ "loss": 1.0026,
+ "step": 104
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3461538461538463e-05,
+ "loss": 0.2628,
+ "step": 105
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3589743589743592e-05,
+ "loss": 0.9652,
+ "step": 106
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3717948717948718e-05,
+ "loss": 1.0551,
+ "step": 107
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3846153846153847e-05,
+ "loss": 0.9897,
+ "step": 108
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3974358974358975e-05,
+ "loss": 1.0074,
+ "step": 109
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4102564102564105e-05,
+ "loss": 0.9967,
+ "step": 110
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4230769230769232e-05,
+ "loss": 0.9988,
+ "step": 111
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.435897435897436e-05,
+ "loss": 1.07,
+ "step": 112
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4487179487179489e-05,
+ "loss": 0.9754,
+ "step": 113
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4615384615384615e-05,
+ "loss": 1.022,
+ "step": 114
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4743589743589745e-05,
+ "loss": 0.9851,
+ "step": 115
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4871794871794874e-05,
+ "loss": 0.2876,
+ "step": 116
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5000000000000002e-05,
+ "loss": 1.0329,
+ "step": 117
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5128205128205129e-05,
+ "loss": 0.9843,
+ "step": 118
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5256410256410257e-05,
+ "loss": 1.0554,
+ "step": 119
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5384615384615387e-05,
+ "loss": 0.9708,
+ "step": 120
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5512820512820516e-05,
+ "loss": 1.0252,
+ "step": 121
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5641025641025644e-05,
+ "loss": 1.0464,
+ "step": 122
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.576923076923077e-05,
+ "loss": 0.9278,
+ "step": 123
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5897435897435897e-05,
+ "loss": 1.0028,
+ "step": 124
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.602564102564103e-05,
+ "loss": 1.0319,
+ "step": 125
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6153846153846154e-05,
+ "loss": 0.9877,
+ "step": 126
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6282051282051282e-05,
+ "loss": 1.067,
+ "step": 127
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.641025641025641e-05,
+ "loss": 0.8855,
+ "step": 128
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.653846153846154e-05,
+ "loss": 0.9933,
+ "step": 129
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6666666666666667e-05,
+ "loss": 1.0038,
+ "step": 130
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6794871794871796e-05,
+ "loss": 1.0337,
+ "step": 131
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6923076923076924e-05,
+ "loss": 0.9185,
+ "step": 132
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7051282051282053e-05,
+ "loss": 0.9293,
+ "step": 133
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.717948717948718e-05,
+ "loss": 0.9535,
+ "step": 134
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.730769230769231e-05,
+ "loss": 0.9931,
+ "step": 135
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7435897435897438e-05,
+ "loss": 0.934,
+ "step": 136
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7564102564102566e-05,
+ "loss": 0.9966,
+ "step": 137
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7692307692307694e-05,
+ "loss": 1.018,
+ "step": 138
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7820512820512823e-05,
+ "loss": 0.9646,
+ "step": 139
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.794871794871795e-05,
+ "loss": 1.0316,
+ "step": 140
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.807692307692308e-05,
+ "loss": 1.0237,
+ "step": 141
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8205128205128208e-05,
+ "loss": 1.0058,
+ "step": 142
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8333333333333333e-05,
+ "loss": 1.0256,
+ "step": 143
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8461538461538465e-05,
+ "loss": 0.9973,
+ "step": 144
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8589743589743593e-05,
+ "loss": 0.9952,
+ "step": 145
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8717948717948718e-05,
+ "loss": 0.9886,
+ "step": 146
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8846153846153846e-05,
+ "loss": 0.9792,
+ "step": 147
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8974358974358975e-05,
+ "loss": 0.9918,
+ "step": 148
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9102564102564106e-05,
+ "loss": 0.9943,
+ "step": 149
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.923076923076923e-05,
+ "loss": 1.0113,
+ "step": 150
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.935897435897436e-05,
+ "loss": 1.0034,
+ "step": 151
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9487179487179488e-05,
+ "loss": 0.2836,
+ "step": 152
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9615384615384617e-05,
+ "loss": 0.9962,
+ "step": 153
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9743589743589745e-05,
+ "loss": 0.9848,
+ "step": 154
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9871794871794873e-05,
+ "loss": 0.9459,
+ "step": 155
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 2e-05,
+ "loss": 1.06,
+ "step": 156
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999998058057616e-05,
+ "loss": 1.0001,
+ "step": 157
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999992232231216e-05,
+ "loss": 1.0235,
+ "step": 158
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999998252252306e-05,
+ "loss": 0.9819,
+ "step": 159
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999968928936924e-05,
+ "loss": 0.9859,
+ "step": 160
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999995145147809e-05,
+ "loss": 0.9607,
+ "step": 161
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999930090153335e-05,
+ "loss": 0.9999,
+ "step": 162
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999904844970963e-05,
+ "loss": 0.9986,
+ "step": 163
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999987571594078e-05,
+ "loss": 0.337,
+ "step": 164
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.99998427030741e-05,
+ "loss": 0.9978,
+ "step": 165
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999980580638374e-05,
+ "loss": 1.0083,
+ "step": 166
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999976502588403e-05,
+ "loss": 0.9703,
+ "step": 167
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999720361590812e-05,
+ "loss": 0.9653,
+ "step": 168
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999671813521435e-05,
+ "loss": 0.9899,
+ "step": 169
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999961938169475e-05,
+ "loss": 0.9462,
+ "step": 170
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999563066131124e-05,
+ "loss": 0.8944,
+ "step": 171
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999502866852427e-05,
+ "loss": 1.0217,
+ "step": 172
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999943878388204e-05,
+ "loss": 0.9505,
+ "step": 173
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999370817244853e-05,
+ "loss": 0.9858,
+ "step": 174
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999298966967264e-05,
+ "loss": 1.0156,
+ "step": 175
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999223233077178e-05,
+ "loss": 1.001,
+ "step": 176
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999914361560401e-05,
+ "loss": 0.9823,
+ "step": 177
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999060114578682e-05,
+ "loss": 0.9295,
+ "step": 178
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998972730033624e-05,
+ "loss": 0.9641,
+ "step": 179
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998881462002778e-05,
+ "loss": 0.2889,
+ "step": 180
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998786310521585e-05,
+ "loss": 0.9556,
+ "step": 181
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998687275627008e-05,
+ "loss": 1.0336,
+ "step": 182
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998584357357503e-05,
+ "loss": 0.9954,
+ "step": 183
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998477555753054e-05,
+ "loss": 0.958,
+ "step": 184
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998366870855134e-05,
+ "loss": 1.0338,
+ "step": 185
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999825230270673e-05,
+ "loss": 0.982,
+ "step": 186
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998133851352342e-05,
+ "loss": 0.3328,
+ "step": 187
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998011516837974e-05,
+ "loss": 0.9857,
+ "step": 188
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999788529921114e-05,
+ "loss": 0.917,
+ "step": 189
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999775519852086e-05,
+ "loss": 0.2945,
+ "step": 190
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999762121481767e-05,
+ "loss": 0.9773,
+ "step": 191
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.99974833481536e-05,
+ "loss": 0.9617,
+ "step": 192
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997341598582197e-05,
+ "loss": 1.0578,
+ "step": 193
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997195966158518e-05,
+ "loss": 0.9984,
+ "step": 194
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997046450939122e-05,
+ "loss": 0.9619,
+ "step": 195
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996893052982083e-05,
+ "loss": 1.0214,
+ "step": 196
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996735772346973e-05,
+ "loss": 0.9952,
+ "step": 197
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996574609094887e-05,
+ "loss": 1.0151,
+ "step": 198
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996409563288404e-05,
+ "loss": 0.9638,
+ "step": 199
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996240634991645e-05,
+ "loss": 0.9891,
+ "step": 200
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996067824270204e-05,
+ "loss": 1.0223,
+ "step": 201
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999589113119121e-05,
+ "loss": 1.0309,
+ "step": 202
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995710555823277e-05,
+ "loss": 1.0079,
+ "step": 203
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999552609823655e-05,
+ "loss": 0.9522,
+ "step": 204
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999533775850266e-05,
+ "loss": 0.3102,
+ "step": 205
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995145536694764e-05,
+ "loss": 0.9981,
+ "step": 206
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994949432887512e-05,
+ "loss": 0.9842,
+ "step": 207
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999474944715708e-05,
+ "loss": 0.9885,
+ "step": 208
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994545579581125e-05,
+ "loss": 1.0181,
+ "step": 209
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994337830238836e-05,
+ "loss": 0.9843,
+ "step": 210
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994126199210897e-05,
+ "loss": 0.9757,
+ "step": 211
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999391068657951e-05,
+ "loss": 0.9023,
+ "step": 212
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993691292428364e-05,
+ "loss": 0.9472,
+ "step": 213
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993468016842684e-05,
+ "loss": 0.9836,
+ "step": 214
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999324085990918e-05,
+ "loss": 0.9871,
+ "step": 215
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993009821716076e-05,
+ "loss": 1.0082,
+ "step": 216
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992774902353104e-05,
+ "loss": 0.2744,
+ "step": 217
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999253610191151e-05,
+ "loss": 0.3193,
+ "step": 218
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999229342048404e-05,
+ "loss": 1.0274,
+ "step": 219
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992046858164942e-05,
+ "loss": 0.2843,
+ "step": 220
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999179641504999e-05,
+ "loss": 0.982,
+ "step": 221
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991542091236438e-05,
+ "loss": 0.976,
+ "step": 222
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991283886823075e-05,
+ "loss": 1.0374,
+ "step": 223
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991021801910177e-05,
+ "loss": 1.0289,
+ "step": 224
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999075583659954e-05,
+ "loss": 0.9761,
+ "step": 225
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999048599099446e-05,
+ "loss": 0.2977,
+ "step": 226
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990212265199738e-05,
+ "loss": 0.9407,
+ "step": 227
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.998993465932169e-05,
+ "loss": 1.0007,
+ "step": 228
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989653173468137e-05,
+ "loss": 0.9877,
+ "step": 229
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.99893678077484e-05,
+ "loss": 0.9289,
+ "step": 230
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989078562273313e-05,
+ "loss": 0.9585,
+ "step": 231
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9988785437155222e-05,
+ "loss": 0.9449,
+ "step": 232
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9988488432507963e-05,
+ "loss": 1.0345,
+ "step": 233
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9988187548446895e-05,
+ "loss": 0.965,
+ "step": 234
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998788278508888e-05,
+ "loss": 0.9971,
+ "step": 235
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987574142552274e-05,
+ "loss": 0.9898,
+ "step": 236
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987261620956964e-05,
+ "loss": 0.9984,
+ "step": 237
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9986945220424326e-05,
+ "loss": 1.0083,
+ "step": 238
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998662494107724e-05,
+ "loss": 0.9371,
+ "step": 239
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.99863007830401e-05,
+ "loss": 1.024,
+ "step": 240
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985972746438815e-05,
+ "loss": 1.0131,
+ "step": 241
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985640831400778e-05,
+ "loss": 0.923,
+ "step": 242
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998530503805491e-05,
+ "loss": 0.9833,
+ "step": 243
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984965366531624e-05,
+ "loss": 0.981,
+ "step": 244
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984621816962843e-05,
+ "loss": 0.9922,
+ "step": 245
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984274389482005e-05,
+ "loss": 1.037,
+ "step": 246
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983923084224047e-05,
+ "loss": 0.9879,
+ "step": 247
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983567901325404e-05,
+ "loss": 0.9919,
+ "step": 248
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983208840924028e-05,
+ "loss": 0.9303,
+ "step": 249
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998284590315937e-05,
+ "loss": 0.9406,
+ "step": 250
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982479088172403e-05,
+ "loss": 0.3251,
+ "step": 251
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982108396105584e-05,
+ "loss": 0.9975,
+ "step": 252
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9981733827102884e-05,
+ "loss": 0.9497,
+ "step": 253
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998135538130979e-05,
+ "loss": 0.9562,
+ "step": 254
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998097305887328e-05,
+ "loss": 1.0052,
+ "step": 255
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9980586859941846e-05,
+ "loss": 0.9342,
+ "step": 256
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998019678466548e-05,
+ "loss": 0.9237,
+ "step": 257
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997980283319568e-05,
+ "loss": 0.9744,
+ "step": 258
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979405005685466e-05,
+ "loss": 1.0382,
+ "step": 259
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979003302289336e-05,
+ "loss": 0.9797,
+ "step": 260
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997859772316331e-05,
+ "loss": 0.9955,
+ "step": 261
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9978188268464912e-05,
+ "loss": 0.8648,
+ "step": 262
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997777493835317e-05,
+ "loss": 0.9995,
+ "step": 263
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9977357732988616e-05,
+ "loss": 0.9618,
+ "step": 264
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976936652533288e-05,
+ "loss": 0.9682,
+ "step": 265
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997651169715073e-05,
+ "loss": 0.9777,
+ "step": 266
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976082867005985e-05,
+ "loss": 0.9652,
+ "step": 267
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997565016226561e-05,
+ "loss": 1.0588,
+ "step": 268
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997521358309766e-05,
+ "loss": 0.8892,
+ "step": 269
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.99747731296717e-05,
+ "loss": 0.9918,
+ "step": 270
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9974328802158798e-05,
+ "loss": 0.9888,
+ "step": 271
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997388060073152e-05,
+ "loss": 1.022,
+ "step": 272
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9973428525563948e-05,
+ "loss": 0.9987,
+ "step": 273
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972972576831656e-05,
+ "loss": 0.9734,
+ "step": 274
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972512754711738e-05,
+ "loss": 1.049,
+ "step": 275
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997204905938278e-05,
+ "loss": 0.9298,
+ "step": 276
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9971581491024873e-05,
+ "loss": 1.0159,
+ "step": 277
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997111004981962e-05,
+ "loss": 0.9998,
+ "step": 278
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9970634735950117e-05,
+ "loss": 1.0013,
+ "step": 279
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9970155549600978e-05,
+ "loss": 0.9775,
+ "step": 280
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9969672490958304e-05,
+ "loss": 0.9639,
+ "step": 281
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.996918556020972e-05,
+ "loss": 0.9604,
+ "step": 282
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.996869475754434e-05,
+ "loss": 0.9464,
+ "step": 283
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9968200083152784e-05,
+ "loss": 0.9963,
+ "step": 284
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9967701537227175e-05,
+ "loss": 0.9389,
+ "step": 285
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996719911996115e-05,
+ "loss": 0.9534,
+ "step": 286
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996669283154984e-05,
+ "loss": 0.8961,
+ "step": 287
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996618267218988e-05,
+ "loss": 0.9537,
+ "step": 288
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996566864207941e-05,
+ "loss": 0.9773,
+ "step": 289
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9965150741418072e-05,
+ "loss": 0.9985,
+ "step": 290
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9964628970407018e-05,
+ "loss": 1.0109,
+ "step": 291
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9964103329248892e-05,
+ "loss": 0.9856,
+ "step": 292
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996357381814785e-05,
+ "loss": 0.9531,
+ "step": 293
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996304043730955e-05,
+ "loss": 1.0416,
+ "step": 294
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9962503186941143e-05,
+ "loss": 0.902,
+ "step": 295
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9961962067251298e-05,
+ "loss": 1.0041,
+ "step": 296
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9961417078450177e-05,
+ "loss": 0.3216,
+ "step": 297
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996086822074945e-05,
+ "loss": 0.9695,
+ "step": 298
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9960315494362286e-05,
+ "loss": 1.0055,
+ "step": 299
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9959758899503355e-05,
+ "loss": 0.9274,
+ "step": 300
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995919843638883e-05,
+ "loss": 1.0085,
+ "step": 301
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9958634105236395e-05,
+ "loss": 1.024,
+ "step": 302
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9958065906265228e-05,
+ "loss": 0.9575,
+ "step": 303
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9957493839696013e-05,
+ "loss": 0.931,
+ "step": 304
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9956917905750926e-05,
+ "loss": 1.014,
+ "step": 305
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995633810465366e-05,
+ "loss": 0.9083,
+ "step": 306
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.99557544366294e-05,
+ "loss": 1.0107,
+ "step": 307
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9955166901904838e-05,
+ "loss": 0.9126,
+ "step": 308
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9954575500708164e-05,
+ "loss": 0.9656,
+ "step": 309
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995398023326907e-05,
+ "loss": 0.95,
+ "step": 310
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9953381099818756e-05,
+ "loss": 0.9424,
+ "step": 311
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9952778100589912e-05,
+ "loss": 0.8988,
+ "step": 312
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9952171235816747e-05,
+ "loss": 1.0329,
+ "step": 313
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9951560505734948e-05,
+ "loss": 1.0457,
+ "step": 314
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9950945910581718e-05,
+ "loss": 0.8971,
+ "step": 315
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9950327450595766e-05,
+ "loss": 0.9726,
+ "step": 316
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949705126017286e-05,
+ "loss": 0.9883,
+ "step": 317
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949078937087988e-05,
+ "loss": 0.987,
+ "step": 318
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994844888405107e-05,
+ "loss": 0.9479,
+ "step": 319
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9947814967151246e-05,
+ "loss": 0.9239,
+ "step": 320
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9947177186634716e-05,
+ "loss": 0.9383,
+ "step": 321
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9946535542749187e-05,
+ "loss": 0.2709,
+ "step": 322
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9945890035743866e-05,
+ "loss": 1.053,
+ "step": 323
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9945240665869465e-05,
+ "loss": 0.9982,
+ "step": 324
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9944587433378187e-05,
+ "loss": 1.0055,
+ "step": 325
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994393033852374e-05,
+ "loss": 0.9182,
+ "step": 326
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9943269381561334e-05,
+ "loss": 1.0582,
+ "step": 327
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994260456274768e-05,
+ "loss": 0.9732,
+ "step": 328
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9941935882340976e-05,
+ "loss": 0.9413,
+ "step": 329
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994126334060094e-05,
+ "loss": 0.9014,
+ "step": 330
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994058693778878e-05,
+ "loss": 0.9939,
+ "step": 331
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9939906674167192e-05,
+ "loss": 0.9712,
+ "step": 332
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993922255000039e-05,
+ "loss": 0.9642,
+ "step": 333
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993853456555408e-05,
+ "loss": 0.9423,
+ "step": 334
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9937842721095468e-05,
+ "loss": 1.0095,
+ "step": 335
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9937147016893257e-05,
+ "loss": 0.9853,
+ "step": 336
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9936447453217646e-05,
+ "loss": 0.9414,
+ "step": 337
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9935744030340347e-05,
+ "loss": 0.9975,
+ "step": 338
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9935036748534555e-05,
+ "loss": 1.0131,
+ "step": 339
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993432560807497e-05,
+ "loss": 0.9472,
+ "step": 340
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993361060923779e-05,
+ "loss": 0.9754,
+ "step": 341
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9932891752300717e-05,
+ "loss": 0.9018,
+ "step": 342
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9932169037542947e-05,
+ "loss": 0.9971,
+ "step": 343
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9931442465245164e-05,
+ "loss": 0.9472,
+ "step": 344
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9930712035689576e-05,
+ "loss": 0.9566,
+ "step": 345
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992997774915986e-05,
+ "loss": 0.9609,
+ "step": 346
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992923960594121e-05,
+ "loss": 1.0305,
+ "step": 347
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9928497606320308e-05,
+ "loss": 0.9794,
+ "step": 348
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992775175058535e-05,
+ "loss": 0.9911,
+ "step": 349
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9927002039026002e-05,
+ "loss": 0.8958,
+ "step": 350
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9926248471933453e-05,
+ "loss": 0.998,
+ "step": 351
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9925491049600382e-05,
+ "loss": 0.3126,
+ "step": 352
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9924729772320953e-05,
+ "loss": 0.9474,
+ "step": 353
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9923964640390846e-05,
+ "loss": 1.0056,
+ "step": 354
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9923195654107227e-05,
+ "loss": 0.952,
+ "step": 355
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992242281376876e-05,
+ "loss": 0.9329,
+ "step": 356
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9921646119675606e-05,
+ "loss": 0.9567,
+ "step": 357
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9920865572129426e-05,
+ "loss": 1.0107,
+ "step": 358
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9920081171433377e-05,
+ "loss": 1.0179,
+ "step": 359
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991929291789211e-05,
+ "loss": 1.0085,
+ "step": 360
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9918500811811778e-05,
+ "loss": 0.9612,
+ "step": 361
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991770485350002e-05,
+ "loss": 0.9569,
+ "step": 362
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991690504326597e-05,
+ "loss": 0.9991,
+ "step": 363
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9916101381420285e-05,
+ "loss": 0.9678,
+ "step": 364
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9915293868275083e-05,
+ "loss": 0.9505,
+ "step": 365
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9914482504143996e-05,
+ "loss": 0.9855,
+ "step": 366
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9913667289342147e-05,
+ "loss": 0.9686,
+ "step": 367
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991284822418616e-05,
+ "loss": 0.9202,
+ "step": 368
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9912025308994146e-05,
+ "loss": 0.9758,
+ "step": 369
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9911198544085723e-05,
+ "loss": 1.0149,
+ "step": 370
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991036792978199e-05,
+ "loss": 1.0112,
+ "step": 371
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990953346640555e-05,
+ "loss": 0.9875,
+ "step": 372
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9908695154280496e-05,
+ "loss": 0.9437,
+ "step": 373
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9907852993732425e-05,
+ "loss": 0.9477,
+ "step": 374
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990700698508842e-05,
+ "loss": 0.9369,
+ "step": 375
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990615712867706e-05,
+ "loss": 0.9131,
+ "step": 376
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9905303424828418e-05,
+ "loss": 1.022,
+ "step": 377
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9904445873874068e-05,
+ "loss": 0.9213,
+ "step": 378
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9903584476147066e-05,
+ "loss": 0.9923,
+ "step": 379
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9902719231981975e-05,
+ "loss": 0.9728,
+ "step": 380
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9901850141714843e-05,
+ "loss": 0.9763,
+ "step": 381
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900977205683213e-05,
+ "loss": 0.9372,
+ "step": 382
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900100424226124e-05,
+ "loss": 0.9181,
+ "step": 383
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9899219797684113e-05,
+ "loss": 0.9654,
+ "step": 384
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.98983353263992e-05,
+ "loss": 0.9802,
+ "step": 385
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9897447010714905e-05,
+ "loss": 0.9552,
+ "step": 386
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.989655485097624e-05,
+ "loss": 0.9995,
+ "step": 387
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.989565884752971e-05,
+ "loss": 0.9242,
+ "step": 388
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9894759000723308e-05,
+ "loss": 1.0011,
+ "step": 389
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9893855310906526e-05,
+ "loss": 0.9923,
+ "step": 390
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9892947778430352e-05,
+ "loss": 0.9929,
+ "step": 391
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9892036403647256e-05,
+ "loss": 1.0172,
+ "step": 392
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9891121186911207e-05,
+ "loss": 0.9918,
+ "step": 393
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9890202128577664e-05,
+ "loss": 0.9967,
+ "step": 394
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988927922900358e-05,
+ "loss": 0.9102,
+ "step": 395
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9888352488547397e-05,
+ "loss": 0.9903,
+ "step": 396
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988742190756905e-05,
+ "loss": 0.9354,
+ "step": 397
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9886487486429966e-05,
+ "loss": 1.0028,
+ "step": 398
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9885549225493064e-05,
+ "loss": 0.9658,
+ "step": 399
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9884607125122753e-05,
+ "loss": 0.94,
+ "step": 400
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988366118568494e-05,
+ "loss": 0.9509,
+ "step": 401
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988271140754701e-05,
+ "loss": 0.9642,
+ "step": 402
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9881757791077848e-05,
+ "loss": 0.9849,
+ "step": 403
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9880800336647825e-05,
+ "loss": 0.9676,
+ "step": 404
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987983904462881e-05,
+ "loss": 0.9746,
+ "step": 405
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9878873915394154e-05,
+ "loss": 1.0209,
+ "step": 406
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9877904949318704e-05,
+ "loss": 0.9741,
+ "step": 407
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9876932146778796e-05,
+ "loss": 0.9492,
+ "step": 408
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9875955508152254e-05,
+ "loss": 0.2791,
+ "step": 409
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987497503381839e-05,
+ "loss": 0.2944,
+ "step": 410
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9873990724158014e-05,
+ "loss": 0.9199,
+ "step": 411
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987300257955342e-05,
+ "loss": 0.96,
+ "step": 412
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987201060038839e-05,
+ "loss": 0.9848,
+ "step": 413
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9871014787048197e-05,
+ "loss": 0.985,
+ "step": 414
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9870015139919606e-05,
+ "loss": 0.9003,
+ "step": 415
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9869011659390866e-05,
+ "loss": 1.0227,
+ "step": 416
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9868004345851716e-05,
+ "loss": 0.9831,
+ "step": 417
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9866993199693393e-05,
+ "loss": 0.9358,
+ "step": 418
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.98659782213086e-05,
+ "loss": 0.9757,
+ "step": 419
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986495941109156e-05,
+ "loss": 1.0239,
+ "step": 420
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9863936769437956e-05,
+ "loss": 0.9802,
+ "step": 421
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986291029674497e-05,
+ "loss": 0.9725,
+ "step": 422
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986187999341128e-05,
+ "loss": 1.008,
+ "step": 423
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9860845859837034e-05,
+ "loss": 0.9516,
+ "step": 424
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985980789642388e-05,
+ "loss": 0.8976,
+ "step": 425
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985876610357496e-05,
+ "loss": 0.9699,
+ "step": 426
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9857720481694887e-05,
+ "loss": 0.9561,
+ "step": 427
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9856671031189765e-05,
+ "loss": 0.9651,
+ "step": 428
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.98556177524672e-05,
+ "loss": 0.9456,
+ "step": 429
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9854560645936262e-05,
+ "loss": 1.0112,
+ "step": 430
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9853499712007523e-05,
+ "loss": 0.9635,
+ "step": 431
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9852434951093035e-05,
+ "loss": 0.8902,
+ "step": 432
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985136636360635e-05,
+ "loss": 0.9033,
+ "step": 433
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985029394996248e-05,
+ "loss": 0.2969,
+ "step": 434
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9849217710577945e-05,
+ "loss": 0.3144,
+ "step": 435
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9848137645870745e-05,
+ "loss": 0.9346,
+ "step": 436
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9847053756260363e-05,
+ "loss": 0.9726,
+ "step": 437
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.984596604216777e-05,
+ "loss": 0.9481,
+ "step": 438
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.984487450401542e-05,
+ "loss": 0.9606,
+ "step": 439
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9843779142227258e-05,
+ "loss": 0.8909,
+ "step": 440
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9842679957228706e-05,
+ "loss": 1.0256,
+ "step": 441
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9841576949446675e-05,
+ "loss": 0.9613,
+ "step": 442
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.984047011930956e-05,
+ "loss": 0.9351,
+ "step": 443
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9839359467247243e-05,
+ "loss": 0.9766,
+ "step": 444
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.983824499369109e-05,
+ "loss": 1.0027,
+ "step": 445
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9837126699073948e-05,
+ "loss": 0.9637,
+ "step": 446
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9836004583830146e-05,
+ "loss": 0.9507,
+ "step": 447
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9834878648395507e-05,
+ "loss": 0.9815,
+ "step": 448
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9833748893207326e-05,
+ "loss": 0.9587,
+ "step": 449
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9832615318704388e-05,
+ "loss": 0.8957,
+ "step": 450
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9831477925326962e-05,
+ "loss": 1.0338,
+ "step": 451
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.98303367135168e-05,
+ "loss": 0.9901,
+ "step": 452
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9829191683717133e-05,
+ "loss": 0.9134,
+ "step": 453
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9828042836372677e-05,
+ "loss": 0.9366,
+ "step": 454
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9826890171929634e-05,
+ "loss": 0.9063,
+ "step": 455
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.982573369083568e-05,
+ "loss": 0.936,
+ "step": 456
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9824573393539984e-05,
+ "loss": 0.9738,
+ "step": 457
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.982340928049319e-05,
+ "loss": 0.9805,
+ "step": 458
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9822241352147426e-05,
+ "loss": 0.9481,
+ "step": 459
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9821069608956307e-05,
+ "loss": 0.9224,
+ "step": 460
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9819894051374917e-05,
+ "loss": 0.9435,
+ "step": 461
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981871467985983e-05,
+ "loss": 0.9558,
+ "step": 462
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9817531494869105e-05,
+ "loss": 0.9753,
+ "step": 463
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9816344496862272e-05,
+ "loss": 0.9506,
+ "step": 464
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9815153686300352e-05,
+ "loss": 0.9922,
+ "step": 465
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981395906364584e-05,
+ "loss": 1.007,
+ "step": 466
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9812760629362714e-05,
+ "loss": 0.9239,
+ "step": 467
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981155838391643e-05,
+ "loss": 0.8999,
+ "step": 468
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9810352327773935e-05,
+ "loss": 0.8966,
+ "step": 469
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9809142461403635e-05,
+ "loss": 0.8609,
+ "step": 470
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9807928785275433e-05,
+ "loss": 1.0235,
+ "step": 471
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.980671129986071e-05,
+ "loss": 0.9631,
+ "step": 472
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9805490005632323e-05,
+ "loss": 1.0053,
+ "step": 473
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.98042649030646e-05,
+ "loss": 0.9878,
+ "step": 474
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9803035992633366e-05,
+ "loss": 1.0371,
+ "step": 475
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9801803274815915e-05,
+ "loss": 1.0088,
+ "step": 476
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9800566750091018e-05,
+ "loss": 0.9889,
+ "step": 477
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9799326418938924e-05,
+ "loss": 0.9152,
+ "step": 478
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979808228184137e-05,
+ "loss": 0.9684,
+ "step": 479
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9796834339281557e-05,
+ "loss": 0.9171,
+ "step": 480
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979558259174418e-05,
+ "loss": 0.9473,
+ "step": 481
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9794327039715395e-05,
+ "loss": 0.9039,
+ "step": 482
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979306768368285e-05,
+ "loss": 0.9673,
+ "step": 483
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9791804524135663e-05,
+ "loss": 0.9844,
+ "step": 484
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979053756156443e-05,
+ "loss": 0.9177,
+ "step": 485
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9789266796461222e-05,
+ "loss": 0.9703,
+ "step": 486
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9787992229319594e-05,
+ "loss": 0.9167,
+ "step": 487
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.978671386063457e-05,
+ "loss": 0.9837,
+ "step": 488
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.978543169090265e-05,
+ "loss": 0.3561,
+ "step": 489
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9784145720621827e-05,
+ "loss": 0.9968,
+ "step": 490
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9782855950291542e-05,
+ "loss": 0.963,
+ "step": 491
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.978156238041274e-05,
+ "loss": 0.9685,
+ "step": 492
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9780265011487822e-05,
+ "loss": 0.9381,
+ "step": 493
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9778963844020668e-05,
+ "loss": 0.9407,
+ "step": 494
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977765887851664e-05,
+ "loss": 0.9165,
+ "step": 495
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977635011548257e-05,
+ "loss": 0.3316,
+ "step": 496
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9775037555426772e-05,
+ "loss": 0.9159,
+ "step": 497
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9773721198859024e-05,
+ "loss": 0.9379,
+ "step": 498
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9772401046290584e-05,
+ "loss": 0.9768,
+ "step": 499
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9771077098234187e-05,
+ "loss": 0.9927,
+ "step": 500
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9769749355204034e-05,
+ "loss": 0.9546,
+ "step": 501
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976841781771581e-05,
+ "loss": 0.9958,
+ "step": 502
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9767082486286667e-05,
+ "loss": 0.9466,
+ "step": 503
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9765743361435234e-05,
+ "loss": 0.9159,
+ "step": 504
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9764400443681607e-05,
+ "loss": 0.8767,
+ "step": 505
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9763053733547367e-05,
+ "loss": 1.0446,
+ "step": 506
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976170323155555e-05,
+ "loss": 0.9448,
+ "step": 507
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.976034893823069e-05,
+ "loss": 0.9132,
+ "step": 508
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975899085409876e-05,
+ "loss": 0.9193,
+ "step": 509
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9757628979687247e-05,
+ "loss": 0.9665,
+ "step": 510
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975626331552507e-05,
+ "loss": 0.9742,
+ "step": 511
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9754893862142643e-05,
+ "loss": 0.294,
+ "step": 512
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9753520620071846e-05,
+ "loss": 1.0007,
+ "step": 513
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9752143589846027e-05,
+ "loss": 0.9719,
+ "step": 514
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9750762772000014e-05,
+ "loss": 0.9857,
+ "step": 515
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9749378167070097e-05,
+ "loss": 0.9214,
+ "step": 516
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9747989775594044e-05,
+ "loss": 0.9272,
+ "step": 517
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974659759811109e-05,
+ "loss": 0.9527,
+ "step": 518
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9745201635161938e-05,
+ "loss": 0.9597,
+ "step": 519
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9743801887288762e-05,
+ "loss": 0.9602,
+ "step": 520
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9742398355035212e-05,
+ "loss": 0.9721,
+ "step": 521
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9740991038946404e-05,
+ "loss": 0.3203,
+ "step": 522
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.973957993956892e-05,
+ "loss": 0.9899,
+ "step": 523
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9738165057450817e-05,
+ "loss": 0.9541,
+ "step": 524
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9736746393141617e-05,
+ "loss": 0.9986,
+ "step": 525
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9735323947192317e-05,
+ "loss": 1.0268,
+ "step": 526
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9733897720155377e-05,
+ "loss": 0.9229,
+ "step": 527
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9732467712584723e-05,
+ "loss": 0.9152,
+ "step": 528
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.973103392503576e-05,
+ "loss": 1.0084,
+ "step": 529
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9729596358065347e-05,
+ "loss": 0.9658,
+ "step": 530
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9728155012231825e-05,
+ "loss": 0.9228,
+ "step": 531
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9726709888094994e-05,
+ "loss": 0.9909,
+ "step": 532
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.972526098621612e-05,
+ "loss": 0.955,
+ "step": 533
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.972380830715795e-05,
+ "loss": 0.9968,
+ "step": 534
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9722351851484677e-05,
+ "loss": 0.9466,
+ "step": 535
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9720891619761974e-05,
+ "loss": 0.9519,
+ "step": 536
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9719427612556982e-05,
+ "loss": 1.0199,
+ "step": 537
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9717959830438302e-05,
+ "loss": 0.9054,
+ "step": 538
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9716488273976006e-05,
+ "loss": 0.9618,
+ "step": 539
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971501294374162e-05,
+ "loss": 0.9405,
+ "step": 540
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971353384030816e-05,
+ "loss": 0.9531,
+ "step": 541
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9712050964250083e-05,
+ "loss": 0.9163,
+ "step": 542
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9710564316143323e-05,
+ "loss": 0.9231,
+ "step": 543
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9709073896565276e-05,
+ "loss": 0.9702,
+ "step": 544
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9707579706094807e-05,
+ "loss": 0.9434,
+ "step": 545
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.970608174531224e-05,
+ "loss": 0.9116,
+ "step": 546
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.970458001479937e-05,
+ "loss": 0.9492,
+ "step": 547
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9703074515139445e-05,
+ "loss": 0.951,
+ "step": 548
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9701565246917184e-05,
+ "loss": 0.968,
+ "step": 549
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9700052210718775e-05,
+ "loss": 0.9962,
+ "step": 550
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.969853540713186e-05,
+ "loss": 1.0122,
+ "step": 551
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9697014836745552e-05,
+ "loss": 0.9703,
+ "step": 552
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9695490500150418e-05,
+ "loss": 0.9328,
+ "step": 553
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9693962397938495e-05,
+ "loss": 0.97,
+ "step": 554
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9692430530703282e-05,
+ "loss": 0.9872,
+ "step": 555
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9690894899039735e-05,
+ "loss": 1.015,
+ "step": 556
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9689355503544277e-05,
+ "loss": 0.903,
+ "step": 557
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968781234481479e-05,
+ "loss": 0.9144,
+ "step": 558
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9686265423450624e-05,
+ "loss": 0.9404,
+ "step": 559
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9684714740052584e-05,
+ "loss": 0.9781,
+ "step": 560
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9683160295222934e-05,
+ "loss": 0.9543,
+ "step": 561
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9681602089565403e-05,
+ "loss": 0.9393,
+ "step": 562
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968004012368518e-05,
+ "loss": 0.9467,
+ "step": 563
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967847439818892e-05,
+ "loss": 0.8951,
+ "step": 564
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9676904913684725e-05,
+ "loss": 0.9328,
+ "step": 565
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967533167078217e-05,
+ "loss": 0.9344,
+ "step": 566
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9673754670092283e-05,
+ "loss": 0.979,
+ "step": 567
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9672173912227556e-05,
+ "loss": 0.9351,
+ "step": 568
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967058939780193e-05,
+ "loss": 0.9756,
+ "step": 569
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.966900112743082e-05,
+ "loss": 0.886,
+ "step": 570
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.966740910173108e-05,
+ "loss": 0.9123,
+ "step": 571
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9665813321321054e-05,
+ "loss": 0.958,
+ "step": 572
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9664213786820502e-05,
+ "loss": 0.956,
+ "step": 573
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9662610498850684e-05,
+ "loss": 0.928,
+ "step": 574
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9661003458034288e-05,
+ "loss": 0.9454,
+ "step": 575
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965939266499547e-05,
+ "loss": 0.3032,
+ "step": 576
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9657778120359848e-05,
+ "loss": 0.356,
+ "step": 577
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965615982475449e-05,
+ "loss": 0.9396,
+ "step": 578
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9654537778807924e-05,
+ "loss": 0.9366,
+ "step": 579
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9652911983150135e-05,
+ "loss": 0.9995,
+ "step": 580
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965128243841256e-05,
+ "loss": 0.9299,
+ "step": 581
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.96496491452281e-05,
+ "loss": 1.0017,
+ "step": 582
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9648012104231106e-05,
+ "loss": 0.9755,
+ "step": 583
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964637131605738e-05,
+ "loss": 0.9781,
+ "step": 584
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9644726781344197e-05,
+ "loss": 0.9331,
+ "step": 585
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964307850073026e-05,
+ "loss": 0.9641,
+ "step": 586
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.964142647485576e-05,
+ "loss": 0.948,
+ "step": 587
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9639770704362305e-05,
+ "loss": 0.9493,
+ "step": 588
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9638111189892994e-05,
+ "loss": 0.9352,
+ "step": 589
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9636447932092354e-05,
+ "loss": 0.9591,
+ "step": 590
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.963478093160638e-05,
+ "loss": 0.359,
+ "step": 591
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9633110189082515e-05,
+ "loss": 0.94,
+ "step": 592
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.963143570516965e-05,
+ "loss": 0.9336,
+ "step": 593
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9629757480518144e-05,
+ "loss": 0.9057,
+ "step": 594
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9628075515779796e-05,
+ "loss": 0.9284,
+ "step": 595
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.962638981160786e-05,
+ "loss": 0.9744,
+ "step": 596
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9624700368657045e-05,
+ "loss": 0.9535,
+ "step": 597
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9623007187583518e-05,
+ "loss": 0.9348,
+ "step": 598
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.962131026904488e-05,
+ "loss": 0.9052,
+ "step": 599
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.96196096137002e-05,
+ "loss": 0.981,
+ "step": 600
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9617905222209998e-05,
+ "loss": 0.9034,
+ "step": 601
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961619709523623e-05,
+ "loss": 0.9294,
+ "step": 602
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9614485233442316e-05,
+ "loss": 0.9432,
+ "step": 603
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961276963749313e-05,
+ "loss": 0.9437,
+ "step": 604
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9611050308054982e-05,
+ "loss": 0.9222,
+ "step": 605
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9609327245795642e-05,
+ "loss": 0.9645,
+ "step": 606
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9607600451384327e-05,
+ "loss": 0.9591,
+ "step": 607
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.960586992549171e-05,
+ "loss": 0.3233,
+ "step": 608
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9604135668789897e-05,
+ "loss": 0.3091,
+ "step": 609
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9602397681952462e-05,
+ "loss": 0.9029,
+ "step": 610
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9600655965654413e-05,
+ "loss": 0.9137,
+ "step": 611
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959891052057222e-05,
+ "loss": 0.9258,
+ "step": 612
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9597161347383783e-05,
+ "loss": 1.0029,
+ "step": 613
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959540844676847e-05,
+ "loss": 0.9326,
+ "step": 614
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9593651819407084e-05,
+ "loss": 0.9146,
+ "step": 615
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959189146598188e-05,
+ "loss": 0.9942,
+ "step": 616
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9590127387176556e-05,
+ "loss": 0.9462,
+ "step": 617
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9588359583676263e-05,
+ "loss": 0.9417,
+ "step": 618
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9586588056167595e-05,
+ "loss": 0.9543,
+ "step": 619
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958481280533859e-05,
+ "loss": 0.9091,
+ "step": 620
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958303383187874e-05,
+ "loss": 0.9614,
+ "step": 621
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9581251136478974e-05,
+ "loss": 0.966,
+ "step": 622
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9579464719831668e-05,
+ "loss": 1.0124,
+ "step": 623
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9577674582630653e-05,
+ "loss": 0.9958,
+ "step": 624
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957588072557119e-05,
+ "loss": 0.9447,
+ "step": 625
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957408314935e-05,
+ "loss": 0.8778,
+ "step": 626
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9572281854665233e-05,
+ "loss": 0.9647,
+ "step": 627
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95704768422165e-05,
+ "loss": 0.9164,
+ "step": 628
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956866811270484e-05,
+ "loss": 0.9681,
+ "step": 629
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9566855666832743e-05,
+ "loss": 0.9696,
+ "step": 630
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9565039505304145e-05,
+ "loss": 0.9038,
+ "step": 631
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956321962882442e-05,
+ "loss": 0.9858,
+ "step": 632
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956139603810039e-05,
+ "loss": 0.9405,
+ "step": 633
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9559568733840317e-05,
+ "loss": 0.9368,
+ "step": 634
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9557737716753898e-05,
+ "loss": 0.9261,
+ "step": 635
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9555902987552283e-05,
+ "loss": 0.952,
+ "step": 636
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9554064546948064e-05,
+ "loss": 0.9369,
+ "step": 637
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9552222395655262e-05,
+ "loss": 0.8745,
+ "step": 638
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9550376534389355e-05,
+ "loss": 0.9598,
+ "step": 639
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9548526963867253e-05,
+ "loss": 0.985,
+ "step": 640
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9546673684807303e-05,
+ "loss": 0.9148,
+ "step": 641
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95448166979293e-05,
+ "loss": 0.9259,
+ "step": 642
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9542956003954477e-05,
+ "loss": 0.9543,
+ "step": 643
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9541091603605508e-05,
+ "loss": 0.8677,
+ "step": 644
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95392234976065e-05,
+ "loss": 0.9009,
+ "step": 645
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9537351686683003e-05,
+ "loss": 0.9323,
+ "step": 646
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9535476171562014e-05,
+ "loss": 0.9528,
+ "step": 647
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9533596952971955e-05,
+ "loss": 0.9111,
+ "step": 648
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9531714031642698e-05,
+ "loss": 0.8794,
+ "step": 649
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9529827408305542e-05,
+ "loss": 0.9228,
+ "step": 650
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9527937083693233e-05,
+ "loss": 0.978,
+ "step": 651
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.952604305853995e-05,
+ "loss": 0.9506,
+ "step": 652
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9524145333581315e-05,
+ "loss": 0.994,
+ "step": 653
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9522243909554375e-05,
+ "loss": 0.969,
+ "step": 654
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.952033878719763e-05,
+ "loss": 1.0084,
+ "step": 655
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9518429967251e-05,
+ "loss": 0.3728,
+ "step": 656
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9516517450455853e-05,
+ "loss": 0.9258,
+ "step": 657
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.951460123755499e-05,
+ "loss": 0.8782,
+ "step": 658
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9512681329292635e-05,
+ "loss": 0.9232,
+ "step": 659
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.951075772641447e-05,
+ "loss": 0.9109,
+ "step": 660
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.95088304296676e-05,
+ "loss": 0.9142,
+ "step": 661
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.950689943980056e-05,
+ "loss": 0.9372,
+ "step": 662
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9504964757563322e-05,
+ "loss": 0.9459,
+ "step": 663
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.95030263837073e-05,
+ "loss": 0.9116,
+ "step": 664
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9501084318985335e-05,
+ "loss": 0.9795,
+ "step": 665
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.94991385641517e-05,
+ "loss": 0.9757,
+ "step": 666
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9497189119962105e-05,
+ "loss": 0.987,
+ "step": 667
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9495235987173693e-05,
+ "loss": 0.8944,
+ "step": 668
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.949327916654504e-05,
+ "loss": 0.985,
+ "step": 669
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.949131865883614e-05,
+ "loss": 0.8853,
+ "step": 670
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948935446480845e-05,
+ "loss": 0.91,
+ "step": 671
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948738658522483e-05,
+ "loss": 0.9634,
+ "step": 672
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9485415020849583e-05,
+ "loss": 0.358,
+ "step": 673
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9483439772448444e-05,
+ "loss": 0.8701,
+ "step": 674
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9481460840788573e-05,
+ "loss": 0.8917,
+ "step": 675
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9479478226638565e-05,
+ "loss": 0.3685,
+ "step": 676
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.947749193076845e-05,
+ "loss": 0.9397,
+ "step": 677
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9475501953949674e-05,
+ "loss": 0.9149,
+ "step": 678
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9473508296955126e-05,
+ "loss": 0.3384,
+ "step": 679
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9471510960559122e-05,
+ "loss": 0.9735,
+ "step": 680
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9469509945537395e-05,
+ "loss": 1.0004,
+ "step": 681
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9467505252667126e-05,
+ "loss": 0.9774,
+ "step": 682
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9465496882726913e-05,
+ "loss": 0.9733,
+ "step": 683
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.946348483649678e-05,
+ "loss": 0.9641,
+ "step": 684
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9461469114758184e-05,
+ "loss": 0.9397,
+ "step": 685
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9459449718294008e-05,
+ "loss": 0.9411,
+ "step": 686
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.945742664788856e-05,
+ "loss": 0.9532,
+ "step": 687
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9455399904327585e-05,
+ "loss": 0.9781,
+ "step": 688
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.945336948839824e-05,
+ "loss": 0.9334,
+ "step": 689
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9451335400889114e-05,
+ "loss": 0.9683,
+ "step": 690
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.944929764259023e-05,
+ "loss": 0.3255,
+ "step": 691
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9447256214293026e-05,
+ "loss": 0.9136,
+ "step": 692
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9445211116790365e-05,
+ "loss": 0.9113,
+ "step": 693
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9443162350876544e-05,
+ "loss": 0.9494,
+ "step": 694
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.944110991734728e-05,
+ "loss": 0.8912,
+ "step": 695
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9439053816999717e-05,
+ "loss": 0.9126,
+ "step": 696
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9436994050632415e-05,
+ "loss": 0.932,
+ "step": 697
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9434930619045367e-05,
+ "loss": 0.9146,
+ "step": 698
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9432863523039986e-05,
+ "loss": 0.9433,
+ "step": 699
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9430792763419105e-05,
+ "loss": 0.949,
+ "step": 700
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.942871834098699e-05,
+ "loss": 0.9449,
+ "step": 701
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9426640256549313e-05,
+ "loss": 0.3057,
+ "step": 702
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9424558510913186e-05,
+ "loss": 0.968,
+ "step": 703
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9422473104887133e-05,
+ "loss": 0.9604,
+ "step": 704
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9420384039281103e-05,
+ "loss": 0.9836,
+ "step": 705
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.941829131490646e-05,
+ "loss": 0.9014,
+ "step": 706
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9416194932576e-05,
+ "loss": 0.9281,
+ "step": 707
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.941409489310393e-05,
+ "loss": 0.8974,
+ "step": 708
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9411991197305878e-05,
+ "loss": 0.9138,
+ "step": 709
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9409883845998905e-05,
+ "loss": 0.955,
+ "step": 710
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9407772840001473e-05,
+ "loss": 0.9367,
+ "step": 711
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9405658180133477e-05,
+ "loss": 0.9613,
+ "step": 712
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9403539867216226e-05,
+ "loss": 0.9882,
+ "step": 713
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9401417902072447e-05,
+ "loss": 0.9232,
+ "step": 714
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9399292285526286e-05,
+ "loss": 1.0081,
+ "step": 715
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939716301840331e-05,
+ "loss": 0.9325,
+ "step": 716
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9395030101530504e-05,
+ "loss": 0.969,
+ "step": 717
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939289353573626e-05,
+ "loss": 0.9948,
+ "step": 718
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9390753321850404e-05,
+ "loss": 0.9315,
+ "step": 719
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.938860946070417e-05,
+ "loss": 0.9175,
+ "step": 720
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.93864619531302e-05,
+ "loss": 0.9635,
+ "step": 721
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9384310799962575e-05,
+ "loss": 0.9699,
+ "step": 722
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9382156002036764e-05,
+ "loss": 0.8872,
+ "step": 723
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9379997560189677e-05,
+ "loss": 0.9445,
+ "step": 724
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937783547525962e-05,
+ "loss": 0.9454,
+ "step": 725
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9375669748086326e-05,
+ "loss": 0.9394,
+ "step": 726
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937350037951094e-05,
+ "loss": 0.9521,
+ "step": 727
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9371327370376018e-05,
+ "loss": 0.9497,
+ "step": 728
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936915072152553e-05,
+ "loss": 0.912,
+ "step": 729
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936697043380486e-05,
+ "loss": 0.3029,
+ "step": 730
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936478650806081e-05,
+ "loss": 0.9439,
+ "step": 731
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.936259894514159e-05,
+ "loss": 0.9478,
+ "step": 732
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9360407745896828e-05,
+ "loss": 0.9617,
+ "step": 733
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9358212911177556e-05,
+ "loss": 0.8989,
+ "step": 734
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935601444183622e-05,
+ "loss": 0.9565,
+ "step": 735
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935381233872669e-05,
+ "loss": 0.8481,
+ "step": 736
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935160660270423e-05,
+ "loss": 0.9559,
+ "step": 737
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934939723462552e-05,
+ "loss": 0.907,
+ "step": 738
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9347184235348663e-05,
+ "loss": 0.9452,
+ "step": 739
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9344967605733154e-05,
+ "loss": 0.8931,
+ "step": 740
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934274734663991e-05,
+ "loss": 0.9234,
+ "step": 741
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934052345893125e-05,
+ "loss": 0.9326,
+ "step": 742
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9338295943470915e-05,
+ "loss": 0.9554,
+ "step": 743
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9336064801124034e-05,
+ "loss": 0.9385,
+ "step": 744
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933383003275717e-05,
+ "loss": 0.9185,
+ "step": 745
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933159163923827e-05,
+ "loss": 0.947,
+ "step": 746
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9329349621436708e-05,
+ "loss": 0.9201,
+ "step": 747
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9327103980223255e-05,
+ "loss": 0.3265,
+ "step": 748
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.932485471647009e-05,
+ "loss": 0.3056,
+ "step": 749
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9322601831050804e-05,
+ "loss": 0.9428,
+ "step": 750
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9320345324840396e-05,
+ "loss": 0.9848,
+ "step": 751
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9318085198715257e-05,
+ "loss": 0.9482,
+ "step": 752
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.93158214535532e-05,
+ "loss": 0.9088,
+ "step": 753
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9313554090233436e-05,
+ "loss": 0.9325,
+ "step": 754
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9311283109636586e-05,
+ "loss": 0.9473,
+ "step": 755
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9309008512644668e-05,
+ "loss": 0.9608,
+ "step": 756
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.930673030014111e-05,
+ "loss": 1.0158,
+ "step": 757
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.930444847301075e-05,
+ "loss": 0.953,
+ "step": 758
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9302163032139813e-05,
+ "loss": 0.9279,
+ "step": 759
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9299873978415947e-05,
+ "loss": 0.9526,
+ "step": 760
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9297581312728187e-05,
+ "loss": 0.9076,
+ "step": 761
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929528503596698e-05,
+ "loss": 0.9154,
+ "step": 762
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929298514902418e-05,
+ "loss": 0.9768,
+ "step": 763
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929068165279303e-05,
+ "loss": 0.9425,
+ "step": 764
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928837454816818e-05,
+ "loss": 0.9292,
+ "step": 765
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9286063836045687e-05,
+ "loss": 0.9183,
+ "step": 766
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9283749517323e-05,
+ "loss": 0.9553,
+ "step": 767
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928143159289898e-05,
+ "loss": 0.9285,
+ "step": 768
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927911006367388e-05,
+ "loss": 0.8953,
+ "step": 769
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927678493054935e-05,
+ "loss": 1.01,
+ "step": 770
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9274456194428454e-05,
+ "loss": 0.8866,
+ "step": 771
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9272123856215643e-05,
+ "loss": 0.948,
+ "step": 772
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9269787916816764e-05,
+ "loss": 0.9182,
+ "step": 773
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9267448377139074e-05,
+ "loss": 0.9536,
+ "step": 774
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9265105238091227e-05,
+ "loss": 0.9415,
+ "step": 775
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9262758500583265e-05,
+ "loss": 0.8991,
+ "step": 776
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9260408165526638e-05,
+ "loss": 0.9534,
+ "step": 777
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9258054233834184e-05,
+ "loss": 0.8794,
+ "step": 778
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9255696706420147e-05,
+ "loss": 0.9673,
+ "step": 779
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9253335584200165e-05,
+ "loss": 0.8788,
+ "step": 780
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9250970868091268e-05,
+ "loss": 0.9376,
+ "step": 781
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.924860255901188e-05,
+ "loss": 0.8811,
+ "step": 782
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9246230657881834e-05,
+ "loss": 0.9476,
+ "step": 783
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9243855165622345e-05,
+ "loss": 0.9362,
+ "step": 784
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9241476083156026e-05,
+ "loss": 0.9155,
+ "step": 785
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9239093411406885e-05,
+ "loss": 0.9678,
+ "step": 786
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9236707151300326e-05,
+ "loss": 0.8807,
+ "step": 787
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9234317303763145e-05,
+ "loss": 0.8831,
+ "step": 788
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9231923869723528e-05,
+ "loss": 0.3529,
+ "step": 789
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.922952685011106e-05,
+ "loss": 0.964,
+ "step": 790
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9227126245856716e-05,
+ "loss": 0.3712,
+ "step": 791
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.922472205789286e-05,
+ "loss": 0.9838,
+ "step": 792
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9222314287153255e-05,
+ "loss": 0.299,
+ "step": 793
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9219902934573048e-05,
+ "loss": 0.9794,
+ "step": 794
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9217488001088784e-05,
+ "loss": 0.9706,
+ "step": 795
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9215069487638396e-05,
+ "loss": 0.991,
+ "step": 796
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.92126473951612e-05,
+ "loss": 0.9366,
+ "step": 797
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.921022172459791e-05,
+ "loss": 0.9889,
+ "step": 798
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.920779247689064e-05,
+ "loss": 0.9183,
+ "step": 799
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9205359652982868e-05,
+ "loss": 0.9689,
+ "step": 800
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9202923253819482e-05,
+ "loss": 0.9095,
+ "step": 801
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.920048328034675e-05,
+ "loss": 0.8812,
+ "step": 802
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9198039733512326e-05,
+ "loss": 0.9137,
+ "step": 803
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9195592614265262e-05,
+ "loss": 0.975,
+ "step": 804
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9193141923555984e-05,
+ "loss": 0.8885,
+ "step": 805
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.919068766233632e-05,
+ "loss": 0.854,
+ "step": 806
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9188229831559468e-05,
+ "loss": 0.9822,
+ "step": 807
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9185768432180026e-05,
+ "loss": 0.912,
+ "step": 808
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9183303465153972e-05,
+ "loss": 0.9205,
+ "step": 809
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9180834931438673e-05,
+ "loss": 0.9676,
+ "step": 810
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917836283199288e-05,
+ "loss": 0.8845,
+ "step": 811
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917588716777672e-05,
+ "loss": 0.9723,
+ "step": 812
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917340793975172e-05,
+ "loss": 0.9122,
+ "step": 813
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.917092514888078e-05,
+ "loss": 0.9583,
+ "step": 814
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9168438796128193e-05,
+ "loss": 0.9079,
+ "step": 815
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9165948882459623e-05,
+ "loss": 0.8845,
+ "step": 816
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9163455408842123e-05,
+ "loss": 0.9921,
+ "step": 817
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9160958376244138e-05,
+ "loss": 0.9166,
+ "step": 818
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9158457785635478e-05,
+ "loss": 0.9785,
+ "step": 819
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915595363798735e-05,
+ "loss": 0.8986,
+ "step": 820
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915344593427233e-05,
+ "loss": 0.9226,
+ "step": 821
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9150934675464384e-05,
+ "loss": 0.8712,
+ "step": 822
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9148419862538858e-05,
+ "loss": 0.9654,
+ "step": 823
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9145901496472474e-05,
+ "loss": 0.9771,
+ "step": 824
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9143379578243335e-05,
+ "loss": 0.9436,
+ "step": 825
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.914085410883093e-05,
+ "loss": 0.9894,
+ "step": 826
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9138325089216118e-05,
+ "loss": 0.9068,
+ "step": 827
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.913579252038114e-05,
+ "loss": 0.9422,
+ "step": 828
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9133256403309627e-05,
+ "loss": 0.9182,
+ "step": 829
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.913071673898656e-05,
+ "loss": 0.9261,
+ "step": 830
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.912817352839833e-05,
+ "loss": 0.8802,
+ "step": 831
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9125626772532683e-05,
+ "loss": 0.877,
+ "step": 832
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9123076472378753e-05,
+ "loss": 0.9579,
+ "step": 833
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9120522628927047e-05,
+ "loss": 0.9898,
+ "step": 834
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9117965243169444e-05,
+ "loss": 0.9051,
+ "step": 835
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9115404316099212e-05,
+ "loss": 0.9402,
+ "step": 836
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9112839848710978e-05,
+ "loss": 0.9451,
+ "step": 837
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9110271842000755e-05,
+ "loss": 0.3687,
+ "step": 838
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9107700296965926e-05,
+ "loss": 0.9534,
+ "step": 839
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.910512521460525e-05,
+ "loss": 0.9271,
+ "step": 840
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9102546595918857e-05,
+ "loss": 1.0075,
+ "step": 841
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9099964441908258e-05,
+ "loss": 0.9131,
+ "step": 842
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9097378753576327e-05,
+ "loss": 0.9214,
+ "step": 843
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9094789531927315e-05,
+ "loss": 0.9203,
+ "step": 844
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.909219677796685e-05,
+ "loss": 0.9698,
+ "step": 845
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9089600492701926e-05,
+ "loss": 0.345,
+ "step": 846
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.908700067714091e-05,
+ "loss": 0.3491,
+ "step": 847
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9084397332293537e-05,
+ "loss": 0.9524,
+ "step": 848
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9081790459170926e-05,
+ "loss": 0.9527,
+ "step": 849
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9079180058785547e-05,
+ "loss": 0.9158,
+ "step": 850
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9076566132151255e-05,
+ "loss": 0.9576,
+ "step": 851
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.907394868028326e-05,
+ "loss": 0.9538,
+ "step": 852
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9071327704198163e-05,
+ "loss": 0.9251,
+ "step": 853
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.906870320491391e-05,
+ "loss": 0.8861,
+ "step": 854
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9066075183449835e-05,
+ "loss": 0.9199,
+ "step": 855
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9063443640826624e-05,
+ "loss": 0.8862,
+ "step": 856
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.906080857806634e-05,
+ "loss": 0.9411,
+ "step": 857
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.905816999619242e-05,
+ "loss": 0.955,
+ "step": 858
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9055527896229642e-05,
+ "loss": 0.9152,
+ "step": 859
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.905288227920418e-05,
+ "loss": 0.8776,
+ "step": 860
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9050233146143554e-05,
+ "loss": 0.9462,
+ "step": 861
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9047580498076663e-05,
+ "loss": 1.0074,
+ "step": 862
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904492433603376e-05,
+ "loss": 0.8786,
+ "step": 863
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904226466104647e-05,
+ "loss": 0.9435,
+ "step": 864
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.903960147414778e-05,
+ "loss": 0.9668,
+ "step": 865
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.903693477637204e-05,
+ "loss": 0.3139,
+ "step": 866
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9034264568754967e-05,
+ "loss": 0.9452,
+ "step": 867
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9031590852333637e-05,
+ "loss": 0.9357,
+ "step": 868
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9028913628146487e-05,
+ "loss": 0.9148,
+ "step": 869
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.902623289723333e-05,
+ "loss": 0.9018,
+ "step": 870
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.902354866063532e-05,
+ "loss": 0.9095,
+ "step": 871
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9020860919394992e-05,
+ "loss": 0.8821,
+ "step": 872
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9018169674556228e-05,
+ "loss": 0.8456,
+ "step": 873
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.901547492716428e-05,
+ "loss": 0.9265,
+ "step": 874
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9012776678265756e-05,
+ "loss": 0.9095,
+ "step": 875
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9010074928908624e-05,
+ "loss": 0.9256,
+ "step": 876
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900736968014221e-05,
+ "loss": 0.9216,
+ "step": 877
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9004660933017208e-05,
+ "loss": 0.9195,
+ "step": 878
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900194868858566e-05,
+ "loss": 0.9252,
+ "step": 879
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8999232947900968e-05,
+ "loss": 0.916,
+ "step": 880
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89965137120179e-05,
+ "loss": 0.3312,
+ "step": 881
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.899379098199257e-05,
+ "loss": 0.8992,
+ "step": 882
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.899106475888246e-05,
+ "loss": 0.9512,
+ "step": 883
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89883350437464e-05,
+ "loss": 0.9483,
+ "step": 884
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8985601837644586e-05,
+ "loss": 0.954,
+ "step": 885
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8982865141638557e-05,
+ "loss": 0.9777,
+ "step": 886
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8980124956791216e-05,
+ "loss": 0.9181,
+ "step": 887
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8977381284166818e-05,
+ "loss": 0.9654,
+ "step": 888
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897463412483098e-05,
+ "loss": 0.8833,
+ "step": 889
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897188347985066e-05,
+ "loss": 0.9304,
+ "step": 890
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896912935029418e-05,
+ "loss": 0.9227,
+ "step": 891
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896637173723121e-05,
+ "loss": 0.9524,
+ "step": 892
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8963610641732777e-05,
+ "loss": 0.9938,
+ "step": 893
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8960846064871257e-05,
+ "loss": 0.8756,
+ "step": 894
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8958078007720387e-05,
+ "loss": 0.8807,
+ "step": 895
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.895530647135524e-05,
+ "loss": 0.9026,
+ "step": 896
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8952531456852248e-05,
+ "loss": 0.3359,
+ "step": 897
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8949752965289197e-05,
+ "loss": 0.8913,
+ "step": 898
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.894697099774523e-05,
+ "loss": 0.8688,
+ "step": 899
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.894418555530082e-05,
+ "loss": 0.9398,
+ "step": 900
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89413966390378e-05,
+ "loss": 0.9413,
+ "step": 901
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8938604250039362e-05,
+ "loss": 0.9731,
+ "step": 902
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8935808389390032e-05,
+ "loss": 0.9106,
+ "step": 903
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.893300905817569e-05,
+ "loss": 0.8899,
+ "step": 904
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8930206257483566e-05,
+ "loss": 0.983,
+ "step": 905
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8927399988402233e-05,
+ "loss": 0.9512,
+ "step": 906
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8924590252021614e-05,
+ "loss": 0.9165,
+ "step": 907
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8921777049432985e-05,
+ "loss": 0.35,
+ "step": 908
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8918960381728947e-05,
+ "loss": 0.9625,
+ "step": 909
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8916140250003475e-05,
+ "loss": 0.905,
+ "step": 910
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.891331665535187e-05,
+ "loss": 0.9542,
+ "step": 911
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8910489598870784e-05,
+ "loss": 0.9589,
+ "step": 912
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8907659081658214e-05,
+ "loss": 0.9409,
+ "step": 913
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8904825104813497e-05,
+ "loss": 0.89,
+ "step": 914
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8901987669437322e-05,
+ "loss": 0.944,
+ "step": 915
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.889914677663171e-05,
+ "loss": 0.9217,
+ "step": 916
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8896302427500042e-05,
+ "loss": 0.8912,
+ "step": 917
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8893454623147017e-05,
+ "loss": 0.9592,
+ "step": 918
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.88906033646787e-05,
+ "loss": 0.9194,
+ "step": 919
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8887748653202478e-05,
+ "loss": 0.9415,
+ "step": 920
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8884890489827097e-05,
+ "loss": 0.8378,
+ "step": 921
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8882028875662627e-05,
+ "loss": 0.8577,
+ "step": 922
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8879163811820493e-05,
+ "loss": 0.9159,
+ "step": 923
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8876295299413445e-05,
+ "loss": 0.8698,
+ "step": 924
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8873423339555584e-05,
+ "loss": 0.9418,
+ "step": 925
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8870547933362352e-05,
+ "loss": 0.3612,
+ "step": 926
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.886766908195051e-05,
+ "loss": 0.9417,
+ "step": 927
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8864786786438187e-05,
+ "loss": 0.9079,
+ "step": 928
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.886190104794482e-05,
+ "loss": 0.9634,
+ "step": 929
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8859011867591203e-05,
+ "loss": 0.9203,
+ "step": 930
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885611924649946e-05,
+ "loss": 0.9226,
+ "step": 931
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885322318579305e-05,
+ "loss": 0.9712,
+ "step": 932
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8850323686596766e-05,
+ "loss": 0.9656,
+ "step": 933
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8847420750036748e-05,
+ "loss": 0.8569,
+ "step": 934
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.884451437724046e-05,
+ "loss": 0.9105,
+ "step": 935
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8841604569336702e-05,
+ "loss": 0.9168,
+ "step": 936
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.883869132745561e-05,
+ "loss": 0.8854,
+ "step": 937
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.883577465272866e-05,
+ "loss": 0.8706,
+ "step": 938
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8832854546288642e-05,
+ "loss": 0.9097,
+ "step": 939
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8829931009269707e-05,
+ "loss": 0.9096,
+ "step": 940
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882700404280731e-05,
+ "loss": 0.932,
+ "step": 941
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8824073648038258e-05,
+ "loss": 0.9086,
+ "step": 942
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882113982610068e-05,
+ "loss": 0.9184,
+ "step": 943
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.881820257813404e-05,
+ "loss": 0.9778,
+ "step": 944
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8815261905279133e-05,
+ "loss": 0.9168,
+ "step": 945
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8812317808678075e-05,
+ "loss": 0.3528,
+ "step": 946
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8809370289474327e-05,
+ "loss": 0.9731,
+ "step": 947
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8806419348812673e-05,
+ "loss": 0.9179,
+ "step": 948
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8803464987839217e-05,
+ "loss": 0.9582,
+ "step": 949
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.88005072077014e-05,
+ "loss": 0.947,
+ "step": 950
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8797546009547996e-05,
+ "loss": 0.958,
+ "step": 951
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.879458139452909e-05,
+ "loss": 0.9568,
+ "step": 952
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8791613363796118e-05,
+ "loss": 0.9292,
+ "step": 953
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8788641918501817e-05,
+ "loss": 0.8576,
+ "step": 954
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8785667059800264e-05,
+ "loss": 0.8156,
+ "step": 955
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8782688788846865e-05,
+ "loss": 0.8837,
+ "step": 956
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877970710679834e-05,
+ "loss": 0.9345,
+ "step": 957
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877672201481275e-05,
+ "loss": 0.8941,
+ "step": 958
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877373351404946e-05,
+ "loss": 0.9011,
+ "step": 959
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8770741605669173e-05,
+ "loss": 0.9253,
+ "step": 960
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.876774629083391e-05,
+ "loss": 0.9121,
+ "step": 961
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8764747570707017e-05,
+ "loss": 0.937,
+ "step": 962
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8761745446453167e-05,
+ "loss": 0.9532,
+ "step": 963
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.875873991923835e-05,
+ "loss": 0.9758,
+ "step": 964
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.875573099022987e-05,
+ "loss": 0.9477,
+ "step": 965
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8752718660596367e-05,
+ "loss": 0.9285,
+ "step": 966
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8749702931507797e-05,
+ "loss": 0.904,
+ "step": 967
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.874668380413543e-05,
+ "loss": 0.8815,
+ "step": 968
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8743661279651856e-05,
+ "loss": 0.9679,
+ "step": 969
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8740635359231e-05,
+ "loss": 0.9621,
+ "step": 970
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8737606044048086e-05,
+ "loss": 0.8696,
+ "step": 971
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.873457333527967e-05,
+ "loss": 0.9741,
+ "step": 972
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.873153723410362e-05,
+ "loss": 0.3411,
+ "step": 973
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8728497741699115e-05,
+ "loss": 0.9085,
+ "step": 974
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.872545485924667e-05,
+ "loss": 0.9056,
+ "step": 975
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8722408587928104e-05,
+ "loss": 1.0055,
+ "step": 976
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8719358928926546e-05,
+ "loss": 0.9415,
+ "step": 977
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8716305883426456e-05,
+ "loss": 0.8918,
+ "step": 978
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.87132494526136e-05,
+ "loss": 0.8949,
+ "step": 979
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8710189637675055e-05,
+ "loss": 0.8764,
+ "step": 980
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8707126439799225e-05,
+ "loss": 0.8984,
+ "step": 981
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.870405986017582e-05,
+ "loss": 0.8995,
+ "step": 982
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8700989899995857e-05,
+ "loss": 0.9052,
+ "step": 983
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8697916560451682e-05,
+ "loss": 0.923,
+ "step": 984
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.869483984273694e-05,
+ "loss": 0.8883,
+ "step": 985
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8691759748046594e-05,
+ "loss": 0.952,
+ "step": 986
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8688676277576916e-05,
+ "loss": 0.971,
+ "step": 987
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.868558943252549e-05,
+ "loss": 0.908,
+ "step": 988
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.868249921409122e-05,
+ "loss": 0.9385,
+ "step": 989
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8679405623474294e-05,
+ "loss": 0.9478,
+ "step": 990
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8676308661876242e-05,
+ "loss": 0.9882,
+ "step": 991
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8673208330499884e-05,
+ "loss": 0.898,
+ "step": 992
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8670104630549348e-05,
+ "loss": 0.9524,
+ "step": 993
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866699756323008e-05,
+ "loss": 0.8181,
+ "step": 994
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866388712974883e-05,
+ "loss": 0.9904,
+ "step": 995
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866077333131365e-05,
+ "loss": 0.8949,
+ "step": 996
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8657656169133908e-05,
+ "loss": 0.9498,
+ "step": 997
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8654535644420276e-05,
+ "loss": 0.8722,
+ "step": 998
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8651411758384718e-05,
+ "loss": 0.8998,
+ "step": 999
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8648284512240527e-05,
+ "loss": 0.8935,
+ "step": 1000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8645153907202285e-05,
+ "loss": 0.8978,
+ "step": 1001
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8642019944485884e-05,
+ "loss": 0.331,
+ "step": 1002
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.863888262530852e-05,
+ "loss": 0.9529,
+ "step": 1003
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.863574195088869e-05,
+ "loss": 0.8517,
+ "step": 1004
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8632597922446195e-05,
+ "loss": 0.9371,
+ "step": 1005
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8629450541202142e-05,
+ "loss": 0.971,
+ "step": 1006
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8626299808378933e-05,
+ "loss": 0.9409,
+ "step": 1007
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.862314572520028e-05,
+ "loss": 0.9195,
+ "step": 1008
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.861998829289119e-05,
+ "loss": 1.0161,
+ "step": 1009
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.861682751267798e-05,
+ "loss": 0.8846,
+ "step": 1010
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.861366338578825e-05,
+ "loss": 0.9276,
+ "step": 1011
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8610495913450922e-05,
+ "loss": 0.9159,
+ "step": 1012
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8607325096896197e-05,
+ "loss": 0.3851,
+ "step": 1013
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8604150937355588e-05,
+ "loss": 0.8811,
+ "step": 1014
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.86009734360619e-05,
+ "loss": 0.9636,
+ "step": 1015
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8597792594249237e-05,
+ "loss": 0.9326,
+ "step": 1016
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8594608413153e-05,
+ "loss": 0.9532,
+ "step": 1017
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8591420894009897e-05,
+ "loss": 0.9115,
+ "step": 1018
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8588230038057913e-05,
+ "loss": 0.9345,
+ "step": 1019
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8585035846536347e-05,
+ "loss": 0.9732,
+ "step": 1020
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8581838320685782e-05,
+ "loss": 0.96,
+ "step": 1021
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8578637461748105e-05,
+ "loss": 0.946,
+ "step": 1022
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.857543327096649e-05,
+ "loss": 0.9461,
+ "step": 1023
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.85722257495854e-05,
+ "loss": 0.9272,
+ "step": 1024
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.856901489885061e-05,
+ "loss": 0.9934,
+ "step": 1025
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.856580072000918e-05,
+ "loss": 0.8986,
+ "step": 1026
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8562583214309447e-05,
+ "loss": 0.9382,
+ "step": 1027
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.855936238300106e-05,
+ "loss": 0.9913,
+ "step": 1028
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8556138227334957e-05,
+ "loss": 0.9356,
+ "step": 1029
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.855291074856336e-05,
+ "loss": 0.9394,
+ "step": 1030
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8549679947939778e-05,
+ "loss": 0.9326,
+ "step": 1031
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8546445826719023e-05,
+ "loss": 0.9758,
+ "step": 1032
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8543208386157195e-05,
+ "loss": 0.9908,
+ "step": 1033
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.853996762751167e-05,
+ "loss": 0.9939,
+ "step": 1034
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8536723552041124e-05,
+ "loss": 0.9334,
+ "step": 1035
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.853347616100552e-05,
+ "loss": 0.8662,
+ "step": 1036
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8530225455666103e-05,
+ "loss": 0.874,
+ "step": 1037
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8526971437285416e-05,
+ "loss": 0.9607,
+ "step": 1038
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8523714107127278e-05,
+ "loss": 0.9436,
+ "step": 1039
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8520453466456797e-05,
+ "loss": 0.9564,
+ "step": 1040
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8517189516540376e-05,
+ "loss": 0.9919,
+ "step": 1041
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8513922258645687e-05,
+ "loss": 0.9457,
+ "step": 1042
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8510651694041702e-05,
+ "loss": 0.9113,
+ "step": 1043
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8507377823998664e-05,
+ "loss": 0.889,
+ "step": 1044
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.850410064978811e-05,
+ "loss": 0.8957,
+ "step": 1045
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8500820172682858e-05,
+ "loss": 0.9713,
+ "step": 1046
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8497536393957005e-05,
+ "loss": 0.3168,
+ "step": 1047
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8494249314885932e-05,
+ "loss": 0.8657,
+ "step": 1048
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8490958936746304e-05,
+ "loss": 0.9021,
+ "step": 1049
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.848766526081607e-05,
+ "loss": 0.8838,
+ "step": 1050
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8484368288374452e-05,
+ "loss": 0.9475,
+ "step": 1051
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8481068020701954e-05,
+ "loss": 0.9845,
+ "step": 1052
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8477764459080364e-05,
+ "loss": 0.8957,
+ "step": 1053
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8474457604792746e-05,
+ "loss": 0.3215,
+ "step": 1054
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8471147459123447e-05,
+ "loss": 0.9442,
+ "step": 1055
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8467834023358088e-05,
+ "loss": 0.8866,
+ "step": 1056
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.846451729878357e-05,
+ "loss": 0.9572,
+ "step": 1057
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.846119728668807e-05,
+ "loss": 0.9419,
+ "step": 1058
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.845787398836104e-05,
+ "loss": 0.9387,
+ "step": 1059
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8454547405093212e-05,
+ "loss": 0.9377,
+ "step": 1060
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8451217538176597e-05,
+ "loss": 0.9553,
+ "step": 1061
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.844788438890447e-05,
+ "loss": 0.986,
+ "step": 1062
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8444547958571396e-05,
+ "loss": 0.3361,
+ "step": 1063
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.84412082484732e-05,
+ "loss": 0.3385,
+ "step": 1064
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8437865259906987e-05,
+ "loss": 0.9415,
+ "step": 1065
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8434518994171136e-05,
+ "loss": 0.9397,
+ "step": 1066
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.84311694525653e-05,
+ "loss": 0.9054,
+ "step": 1067
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.84278166363904e-05,
+ "loss": 0.8955,
+ "step": 1068
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8424460546948632e-05,
+ "loss": 0.9017,
+ "step": 1069
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8421101185543463e-05,
+ "loss": 0.92,
+ "step": 1070
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841773855347963e-05,
+ "loss": 0.9055,
+ "step": 1071
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841437265206314e-05,
+ "loss": 0.8364,
+ "step": 1072
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841100348260127e-05,
+ "loss": 0.9515,
+ "step": 1073
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.840763104640257e-05,
+ "loss": 0.9436,
+ "step": 1074
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8404255344776853e-05,
+ "loss": 0.9395,
+ "step": 1075
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.84008763790352e-05,
+ "loss": 0.8926,
+ "step": 1076
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8397494150489963e-05,
+ "loss": 0.9672,
+ "step": 1077
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8394108660454766e-05,
+ "loss": 0.865,
+ "step": 1078
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8390719910244487e-05,
+ "loss": 0.9504,
+ "step": 1079
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8387327901175286e-05,
+ "loss": 0.941,
+ "step": 1080
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838393263456457e-05,
+ "loss": 0.9799,
+ "step": 1081
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838053411173103e-05,
+ "loss": 0.9256,
+ "step": 1082
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8377132333994606e-05,
+ "loss": 0.9193,
+ "step": 1083
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.837372730267652e-05,
+ "loss": 0.8726,
+ "step": 1084
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8370319019099236e-05,
+ "loss": 0.9096,
+ "step": 1085
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8366907484586497e-05,
+ "loss": 0.9407,
+ "step": 1086
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.83634927004633e-05,
+ "loss": 0.9167,
+ "step": 1087
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8360074668055915e-05,
+ "loss": 0.9128,
+ "step": 1088
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8356653388691857e-05,
+ "loss": 0.8422,
+ "step": 1089
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8353228863699922e-05,
+ "loss": 0.908,
+ "step": 1090
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8349801094410148e-05,
+ "loss": 0.8724,
+ "step": 1091
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8346370082153843e-05,
+ "loss": 0.9003,
+ "step": 1092
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8342935828263574e-05,
+ "loss": 0.98,
+ "step": 1093
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8339498334073166e-05,
+ "loss": 0.8614,
+ "step": 1094
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.83360576009177e-05,
+ "loss": 0.911,
+ "step": 1095
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.833261363013352e-05,
+ "loss": 0.8732,
+ "step": 1096
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.832916642305822e-05,
+ "loss": 0.8753,
+ "step": 1097
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.832571598103066e-05,
+ "loss": 0.9483,
+ "step": 1098
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8322262305390948e-05,
+ "loss": 0.974,
+ "step": 1099
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8318805397480455e-05,
+ "loss": 0.9167,
+ "step": 1100
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8315345258641802e-05,
+ "loss": 0.9712,
+ "step": 1101
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8311881890218873e-05,
+ "loss": 0.9197,
+ "step": 1102
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.830841529355679e-05,
+ "loss": 0.9173,
+ "step": 1103
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8304945470001948e-05,
+ "loss": 0.9408,
+ "step": 1104
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8301472420901985e-05,
+ "loss": 0.9391,
+ "step": 1105
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8297996147605787e-05,
+ "loss": 0.9482,
+ "step": 1106
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.829451665146351e-05,
+ "loss": 0.8962,
+ "step": 1107
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8291033933826535e-05,
+ "loss": 0.9761,
+ "step": 1108
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8287547996047523e-05,
+ "loss": 0.933,
+ "step": 1109
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8284058839480363e-05,
+ "loss": 0.8762,
+ "step": 1110
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8280566465480206e-05,
+ "loss": 0.9217,
+ "step": 1111
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8277070875403455e-05,
+ "loss": 0.3614,
+ "step": 1112
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8273572070607756e-05,
+ "loss": 0.9359,
+ "step": 1113
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8270070052451995e-05,
+ "loss": 0.9401,
+ "step": 1114
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8266564822296323e-05,
+ "loss": 0.9186,
+ "step": 1115
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.826305638150213e-05,
+ "loss": 0.321,
+ "step": 1116
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.825954473143205e-05,
+ "loss": 0.3378,
+ "step": 1117
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8256029873449976e-05,
+ "loss": 0.8492,
+ "step": 1118
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.825251180892103e-05,
+ "loss": 0.3327,
+ "step": 1119
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8248990539211596e-05,
+ "loss": 0.8799,
+ "step": 1120
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8245466065689282e-05,
+ "loss": 0.9734,
+ "step": 1121
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.824193838972297e-05,
+ "loss": 0.9092,
+ "step": 1122
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.823840751268275e-05,
+ "loss": 0.8317,
+ "step": 1123
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8234873435939987e-05,
+ "loss": 0.8746,
+ "step": 1124
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8231336160867275e-05,
+ "loss": 0.9396,
+ "step": 1125
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8227795688838446e-05,
+ "loss": 0.9045,
+ "step": 1126
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.822425202122858e-05,
+ "loss": 0.9036,
+ "step": 1127
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8220705159413996e-05,
+ "loss": 0.8528,
+ "step": 1128
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8217155104772256e-05,
+ "loss": 0.9213,
+ "step": 1129
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8213601858682158e-05,
+ "loss": 0.4092,
+ "step": 1130
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8210045422523744e-05,
+ "loss": 0.9155,
+ "step": 1131
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8206485797678294e-05,
+ "loss": 0.3397,
+ "step": 1132
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.820292298552832e-05,
+ "loss": 0.9601,
+ "step": 1133
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.819935698745759e-05,
+ "loss": 0.8789,
+ "step": 1134
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8195787804851076e-05,
+ "loss": 0.9189,
+ "step": 1135
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8192215439095025e-05,
+ "loss": 0.8967,
+ "step": 1136
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8188639891576893e-05,
+ "loss": 0.8964,
+ "step": 1137
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8185061163685386e-05,
+ "loss": 0.8626,
+ "step": 1138
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.818147925681044e-05,
+ "loss": 0.9607,
+ "step": 1139
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8177894172343227e-05,
+ "loss": 0.9404,
+ "step": 1140
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.817430591167615e-05,
+ "loss": 0.9238,
+ "step": 1141
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8170714476202848e-05,
+ "loss": 0.9255,
+ "step": 1142
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8167119867318197e-05,
+ "loss": 0.3475,
+ "step": 1143
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.81635220864183e-05,
+ "loss": 0.8938,
+ "step": 1144
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8159921134900486e-05,
+ "loss": 0.9342,
+ "step": 1145
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8156317014163337e-05,
+ "loss": 0.9245,
+ "step": 1146
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8152709725606642e-05,
+ "loss": 0.9467,
+ "step": 1147
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8149099270631434e-05,
+ "loss": 0.8735,
+ "step": 1148
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8145485650639973e-05,
+ "loss": 0.3534,
+ "step": 1149
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8141868867035745e-05,
+ "loss": 0.9545,
+ "step": 1150
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8138248921223465e-05,
+ "loss": 0.8612,
+ "step": 1151
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8134625814609084e-05,
+ "loss": 0.3531,
+ "step": 1152
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8130999548599767e-05,
+ "loss": 0.9884,
+ "step": 1153
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8127370124603927e-05,
+ "loss": 0.938,
+ "step": 1154
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8123737544031178e-05,
+ "loss": 0.9063,
+ "step": 1155
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8120101808292373e-05,
+ "loss": 0.9163,
+ "step": 1156
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.81164629187996e-05,
+ "loss": 0.9941,
+ "step": 1157
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811282087696615e-05,
+ "loss": 0.8835,
+ "step": 1158
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8109175684206558e-05,
+ "loss": 0.8915,
+ "step": 1159
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8105527341936574e-05,
+ "loss": 0.9839,
+ "step": 1160
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.810187585157317e-05,
+ "loss": 0.3224,
+ "step": 1161
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8098221214534543e-05,
+ "loss": 0.307,
+ "step": 1162
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8094563432240107e-05,
+ "loss": 0.9391,
+ "step": 1163
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8090902506110513e-05,
+ "loss": 0.9469,
+ "step": 1164
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8087238437567614e-05,
+ "loss": 0.9498,
+ "step": 1165
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8083571228034498e-05,
+ "loss": 0.912,
+ "step": 1166
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.807990087893546e-05,
+ "loss": 0.8633,
+ "step": 1167
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.807622739169603e-05,
+ "loss": 0.952,
+ "step": 1168
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.807255076774294e-05,
+ "loss": 0.3537,
+ "step": 1169
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8068871008504153e-05,
+ "loss": 0.9312,
+ "step": 1170
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8065188115408844e-05,
+ "loss": 1.0083,
+ "step": 1171
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8061502089887406e-05,
+ "loss": 0.9158,
+ "step": 1172
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.805781293337145e-05,
+ "loss": 0.8719,
+ "step": 1173
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8054120647293798e-05,
+ "loss": 0.8885,
+ "step": 1174
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8050425233088496e-05,
+ "loss": 0.3342,
+ "step": 1175
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.80467266921908e-05,
+ "loss": 0.8902,
+ "step": 1176
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8043025026037178e-05,
+ "loss": 0.8875,
+ "step": 1177
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8039320236065314e-05,
+ "loss": 0.9133,
+ "step": 1178
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.803561232371411e-05,
+ "loss": 0.9502,
+ "step": 1179
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.803190129042367e-05,
+ "loss": 0.9052,
+ "step": 1180
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8028187137635325e-05,
+ "loss": 0.9564,
+ "step": 1181
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8024469866791602e-05,
+ "loss": 0.9611,
+ "step": 1182
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.802074947933625e-05,
+ "loss": 0.9002,
+ "step": 1183
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.801702597671422e-05,
+ "loss": 0.3337,
+ "step": 1184
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8013299360371685e-05,
+ "loss": 0.8692,
+ "step": 1185
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8009569631756013e-05,
+ "loss": 0.9799,
+ "step": 1186
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8005836792315793e-05,
+ "loss": 0.8318,
+ "step": 1187
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.800210084350081e-05,
+ "loss": 0.9676,
+ "step": 1188
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.799836178676207e-05,
+ "loss": 0.9258,
+ "step": 1189
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.799461962355178e-05,
+ "loss": 0.9595,
+ "step": 1190
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7990874355323345e-05,
+ "loss": 0.9038,
+ "step": 1191
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7987125983531393e-05,
+ "loss": 0.8838,
+ "step": 1192
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7983374509631742e-05,
+ "loss": 0.8658,
+ "step": 1193
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7979619935081424e-05,
+ "loss": 0.9302,
+ "step": 1194
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.797586226133867e-05,
+ "loss": 0.8896,
+ "step": 1195
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7972101489862924e-05,
+ "loss": 0.9137,
+ "step": 1196
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7968337622114824e-05,
+ "loss": 0.8774,
+ "step": 1197
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7964570659556206e-05,
+ "loss": 0.9242,
+ "step": 1198
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.796080060365012e-05,
+ "loss": 0.9346,
+ "step": 1199
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7957027455860815e-05,
+ "loss": 0.944,
+ "step": 1200
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.795325121765373e-05,
+ "loss": 0.8704,
+ "step": 1201
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.794947189049552e-05,
+ "loss": 0.3457,
+ "step": 1202
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7945689475854033e-05,
+ "loss": 0.9214,
+ "step": 1203
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7941903975198305e-05,
+ "loss": 0.8942,
+ "step": 1204
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7938115389998595e-05,
+ "loss": 0.931,
+ "step": 1205
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7934323721726334e-05,
+ "loss": 0.9481,
+ "step": 1206
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7930528971854166e-05,
+ "loss": 0.951,
+ "step": 1207
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.792673114185593e-05,
+ "loss": 0.9125,
+ "step": 1208
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7922930233206656e-05,
+ "loss": 0.937,
+ "step": 1209
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7919126247382576e-05,
+ "loss": 0.9068,
+ "step": 1210
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.791531918586112e-05,
+ "loss": 0.896,
+ "step": 1211
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7911509050120892e-05,
+ "loss": 0.97,
+ "step": 1212
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7907695841641716e-05,
+ "loss": 0.892,
+ "step": 1213
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7903879561904597e-05,
+ "loss": 0.9074,
+ "step": 1214
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.790006021239173e-05,
+ "loss": 0.9188,
+ "step": 1215
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.789623779458651e-05,
+ "loss": 0.3391,
+ "step": 1216
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.789241230997352e-05,
+ "loss": 1.015,
+ "step": 1217
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7888583760038534e-05,
+ "loss": 0.8729,
+ "step": 1218
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7884752146268513e-05,
+ "loss": 0.8719,
+ "step": 1219
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7880917470151614e-05,
+ "loss": 0.9325,
+ "step": 1220
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7877079733177185e-05,
+ "loss": 0.9018,
+ "step": 1221
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7873238936835754e-05,
+ "loss": 0.8804,
+ "step": 1222
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786939508261904e-05,
+ "loss": 0.9016,
+ "step": 1223
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786554817201996e-05,
+ "loss": 0.9105,
+ "step": 1224
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.78616982065326e-05,
+ "loss": 0.3369,
+ "step": 1225
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.785784518765225e-05,
+ "loss": 0.8996,
+ "step": 1226
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7853989116875373e-05,
+ "loss": 0.3376,
+ "step": 1227
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7850129995699626e-05,
+ "loss": 0.8681,
+ "step": 1228
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7846267825623843e-05,
+ "loss": 0.9937,
+ "step": 1229
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7842402608148053e-05,
+ "loss": 0.9595,
+ "step": 1230
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7838534344773453e-05,
+ "loss": 0.8766,
+ "step": 1231
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7834663037002444e-05,
+ "loss": 0.921,
+ "step": 1232
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7830788686338586e-05,
+ "loss": 0.9094,
+ "step": 1233
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7826911294286636e-05,
+ "loss": 0.8882,
+ "step": 1234
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.782303086235253e-05,
+ "loss": 0.9083,
+ "step": 1235
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.781914739204338e-05,
+ "loss": 0.7964,
+ "step": 1236
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7815260884867486e-05,
+ "loss": 0.9136,
+ "step": 1237
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.781137134233432e-05,
+ "loss": 0.9764,
+ "step": 1238
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7807478765954532e-05,
+ "loss": 0.9434,
+ "step": 1239
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7803583157239958e-05,
+ "loss": 0.9205,
+ "step": 1240
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7799684517703605e-05,
+ "loss": 0.8635,
+ "step": 1241
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.779578284885966e-05,
+ "loss": 0.3322,
+ "step": 1242
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.779187815222349e-05,
+ "loss": 0.8767,
+ "step": 1243
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.778797042931163e-05,
+ "loss": 0.902,
+ "step": 1244
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7784059681641798e-05,
+ "loss": 0.9385,
+ "step": 1245
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.778014591073288e-05,
+ "loss": 0.934,
+ "step": 1246
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.777622911810494e-05,
+ "loss": 0.8906,
+ "step": 1247
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.777230930527922e-05,
+ "loss": 0.8875,
+ "step": 1248
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7768386473778124e-05,
+ "loss": 0.9189,
+ "step": 1249
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7764460625125236e-05,
+ "loss": 0.8831,
+ "step": 1250
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.776053176084531e-05,
+ "loss": 0.9489,
+ "step": 1251
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7756599882464274e-05,
+ "loss": 0.9754,
+ "step": 1252
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7752664991509224e-05,
+ "loss": 0.8612,
+ "step": 1253
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7748727089508423e-05,
+ "loss": 0.8817,
+ "step": 1254
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7744786177991307e-05,
+ "loss": 0.3096,
+ "step": 1255
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.774084225848849e-05,
+ "loss": 0.8883,
+ "step": 1256
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.773689533253173e-05,
+ "loss": 0.8886,
+ "step": 1257
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7732945401653978e-05,
+ "loss": 0.8891,
+ "step": 1258
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7728992467389342e-05,
+ "loss": 0.9199,
+ "step": 1259
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7725036531273087e-05,
+ "loss": 0.9217,
+ "step": 1260
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7721077594841663e-05,
+ "loss": 0.9471,
+ "step": 1261
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.771711565963267e-05,
+ "loss": 0.8964,
+ "step": 1262
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7713150727184878e-05,
+ "loss": 0.9179,
+ "step": 1263
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.770918279903822e-05,
+ "loss": 0.8938,
+ "step": 1264
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.77052118767338e-05,
+ "loss": 0.8972,
+ "step": 1265
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7701237961813874e-05,
+ "loss": 0.9037,
+ "step": 1266
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7697261055821864e-05,
+ "loss": 0.9953,
+ "step": 1267
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7693281160302354e-05,
+ "loss": 0.916,
+ "step": 1268
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7689298276801095e-05,
+ "loss": 0.8463,
+ "step": 1269
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7685312406864986e-05,
+ "loss": 0.9613,
+ "step": 1270
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7681323552042094e-05,
+ "loss": 0.34,
+ "step": 1271
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.767733171388165e-05,
+ "loss": 0.9194,
+ "step": 1272
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7673336893934033e-05,
+ "loss": 0.8789,
+ "step": 1273
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7669339093750786e-05,
+ "loss": 0.872,
+ "step": 1274
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.766533831488461e-05,
+ "loss": 0.9512,
+ "step": 1275
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7661334558889357e-05,
+ "loss": 0.8791,
+ "step": 1276
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7657327827320046e-05,
+ "loss": 0.8505,
+ "step": 1277
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.765331812173284e-05,
+ "loss": 0.9258,
+ "step": 1278
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7649305443685068e-05,
+ "loss": 0.8811,
+ "step": 1279
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.76452897947352e-05,
+ "loss": 0.8763,
+ "step": 1280
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7641271176442876e-05,
+ "loss": 0.8905,
+ "step": 1281
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7637249590368878e-05,
+ "loss": 0.9268,
+ "step": 1282
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.763322503807514e-05,
+ "loss": 0.9025,
+ "step": 1283
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7629197521124758e-05,
+ "loss": 0.8868,
+ "step": 1284
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7625167041081967e-05,
+ "loss": 0.9169,
+ "step": 1285
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7621133599512163e-05,
+ "loss": 0.8898,
+ "step": 1286
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.761709719798189e-05,
+ "loss": 0.9608,
+ "step": 1287
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.761305783805883e-05,
+ "loss": 0.9333,
+ "step": 1288
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7609015521311836e-05,
+ "loss": 0.3426,
+ "step": 1289
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7604970249310893e-05,
+ "loss": 0.8983,
+ "step": 1290
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7600922023627137e-05,
+ "loss": 0.8992,
+ "step": 1291
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.759687084583285e-05,
+ "loss": 0.927,
+ "step": 1292
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.759281671750147e-05,
+ "loss": 0.9638,
+ "step": 1293
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7588759640207564e-05,
+ "loss": 0.3531,
+ "step": 1294
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7584699615526857e-05,
+ "loss": 0.9061,
+ "step": 1295
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7580636645036224e-05,
+ "loss": 0.9489,
+ "step": 1296
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.757657073031367e-05,
+ "loss": 0.985,
+ "step": 1297
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7572501872938343e-05,
+ "loss": 0.3491,
+ "step": 1298
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.756843007449055e-05,
+ "loss": 0.9456,
+ "step": 1299
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7564355336551727e-05,
+ "loss": 0.8545,
+ "step": 1300
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7560277660704455e-05,
+ "loss": 0.3479,
+ "step": 1301
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.755619704853246e-05,
+ "loss": 0.8946,
+ "step": 1302
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7552113501620595e-05,
+ "loss": 0.8977,
+ "step": 1303
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7548027021554874e-05,
+ "loss": 0.9164,
+ "step": 1304
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.754393760992243e-05,
+ "loss": 0.9089,
+ "step": 1305
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7539845268311548e-05,
+ "loss": 0.9209,
+ "step": 1306
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7535749998311645e-05,
+ "loss": 0.9072,
+ "step": 1307
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.753165180151328e-05,
+ "loss": 0.3462,
+ "step": 1308
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.752755067950814e-05,
+ "loss": 0.9508,
+ "step": 1309
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.752344663388906e-05,
+ "loss": 0.9197,
+ "step": 1310
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7519339666249997e-05,
+ "loss": 0.8936,
+ "step": 1311
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7515229778186052e-05,
+ "loss": 0.897,
+ "step": 1312
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7511116971293463e-05,
+ "loss": 0.8872,
+ "step": 1313
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7507001247169587e-05,
+ "loss": 0.9111,
+ "step": 1314
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7502882607412933e-05,
+ "loss": 0.9244,
+ "step": 1315
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.749876105362313e-05,
+ "loss": 0.9142,
+ "step": 1316
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7494636587400942e-05,
+ "loss": 0.9361,
+ "step": 1317
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.749050921034826e-05,
+ "loss": 0.9259,
+ "step": 1318
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7486378924068123e-05,
+ "loss": 0.9613,
+ "step": 1319
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.748224573016467e-05,
+ "loss": 0.9206,
+ "step": 1320
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7478109630243195e-05,
+ "loss": 0.8995,
+ "step": 1321
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.747397062591011e-05,
+ "loss": 0.8912,
+ "step": 1322
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.746982871877296e-05,
+ "loss": 0.9132,
+ "step": 1323
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7465683910440405e-05,
+ "loss": 0.3367,
+ "step": 1324
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7461536202522248e-05,
+ "loss": 0.8463,
+ "step": 1325
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.745738559662941e-05,
+ "loss": 0.8797,
+ "step": 1326
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7453232094373936e-05,
+ "loss": 0.9016,
+ "step": 1327
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7449075697369005e-05,
+ "loss": 0.9495,
+ "step": 1328
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7444916407228904e-05,
+ "loss": 0.8987,
+ "step": 1329
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.744075422556906e-05,
+ "loss": 0.8992,
+ "step": 1330
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7436589154006014e-05,
+ "loss": 0.991,
+ "step": 1331
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.743242119415743e-05,
+ "loss": 0.8775,
+ "step": 1332
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7428250347642102e-05,
+ "loss": 0.8728,
+ "step": 1333
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7424076616079933e-05,
+ "loss": 0.9342,
+ "step": 1334
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7419900001091953e-05,
+ "loss": 0.9245,
+ "step": 1335
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7415720504300314e-05,
+ "loss": 0.9226,
+ "step": 1336
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.741153812732828e-05,
+ "loss": 0.9372,
+ "step": 1337
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7407352871800246e-05,
+ "loss": 0.8907,
+ "step": 1338
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7403164739341708e-05,
+ "loss": 0.911,
+ "step": 1339
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.739897373157929e-05,
+ "loss": 0.8968,
+ "step": 1340
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7394779850140736e-05,
+ "loss": 0.938,
+ "step": 1341
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7390583096654895e-05,
+ "loss": 0.8963,
+ "step": 1342
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7386383472751745e-05,
+ "loss": 0.9017,
+ "step": 1343
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7382180980062365e-05,
+ "loss": 0.9331,
+ "step": 1344
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7377975620218954e-05,
+ "loss": 0.9107,
+ "step": 1345
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7373767394854836e-05,
+ "loss": 0.9292,
+ "step": 1346
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7369556305604422e-05,
+ "loss": 0.9282,
+ "step": 1347
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.736534235410326e-05,
+ "loss": 0.8801,
+ "step": 1348
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7361125541988e-05,
+ "loss": 0.8528,
+ "step": 1349
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7356905870896407e-05,
+ "loss": 0.9513,
+ "step": 1350
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.735268334246734e-05,
+ "loss": 0.878,
+ "step": 1351
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7348457958340792e-05,
+ "loss": 0.3268,
+ "step": 1352
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7344229720157846e-05,
+ "loss": 0.8879,
+ "step": 1353
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7339998629560705e-05,
+ "loss": 0.9062,
+ "step": 1354
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7335764688192676e-05,
+ "loss": 0.9827,
+ "step": 1355
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.733152789769817e-05,
+ "loss": 1.0078,
+ "step": 1356
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7327288259722714e-05,
+ "loss": 0.9629,
+ "step": 1357
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7323045775912927e-05,
+ "loss": 0.9111,
+ "step": 1358
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7318800447916543e-05,
+ "loss": 0.94,
+ "step": 1359
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7314552277382403e-05,
+ "loss": 0.9217,
+ "step": 1360
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7310301265960446e-05,
+ "loss": 0.8825,
+ "step": 1361
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7306047415301706e-05,
+ "loss": 0.8768,
+ "step": 1362
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7301790727058344e-05,
+ "loss": 0.8997,
+ "step": 1363
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7297531202883598e-05,
+ "loss": 0.9012,
+ "step": 1364
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7293268844431826e-05,
+ "loss": 0.8939,
+ "step": 1365
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7289003653358472e-05,
+ "loss": 0.9494,
+ "step": 1366
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7284735631320093e-05,
+ "loss": 0.8624,
+ "step": 1367
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7280464779974335e-05,
+ "loss": 0.9329,
+ "step": 1368
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7276191100979952e-05,
+ "loss": 0.8958,
+ "step": 1369
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7271914595996784e-05,
+ "loss": 0.8791,
+ "step": 1370
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7267635266685782e-05,
+ "loss": 0.9476,
+ "step": 1371
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7263353114708993e-05,
+ "loss": 0.963,
+ "step": 1372
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7259068141729542e-05,
+ "loss": 0.8908,
+ "step": 1373
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7254780349411677e-05,
+ "loss": 0.3562,
+ "step": 1374
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7250489739420718e-05,
+ "loss": 0.969,
+ "step": 1375
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7246196313423095e-05,
+ "loss": 0.3459,
+ "step": 1376
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7241900073086318e-05,
+ "loss": 0.9044,
+ "step": 1377
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7237601020079003e-05,
+ "loss": 0.8814,
+ "step": 1378
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7233299156070852e-05,
+ "loss": 0.9551,
+ "step": 1379
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7228994482732653e-05,
+ "loss": 0.9468,
+ "step": 1380
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.72246870017363e-05,
+ "loss": 0.9222,
+ "step": 1381
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7220376714754766e-05,
+ "loss": 0.8468,
+ "step": 1382
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7216063623462112e-05,
+ "loss": 0.8935,
+ "step": 1383
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7211747729533504e-05,
+ "loss": 0.9338,
+ "step": 1384
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7207429034645176e-05,
+ "loss": 0.8834,
+ "step": 1385
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.720310754047446e-05,
+ "loss": 0.9381,
+ "step": 1386
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.719878324869978e-05,
+ "loss": 0.896,
+ "step": 1387
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7194456161000634e-05,
+ "loss": 0.9337,
+ "step": 1388
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.719012627905762e-05,
+ "loss": 0.8796,
+ "step": 1389
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.718579360455241e-05,
+ "loss": 0.3521,
+ "step": 1390
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7181458139167767e-05,
+ "loss": 0.8592,
+ "step": 1391
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7177119884587536e-05,
+ "loss": 0.8868,
+ "step": 1392
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.717277884249664e-05,
+ "loss": 0.9154,
+ "step": 1393
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.716843501458109e-05,
+ "loss": 0.3666,
+ "step": 1394
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.716408840252799e-05,
+ "loss": 0.895,
+ "step": 1395
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7159739008025503e-05,
+ "loss": 0.9729,
+ "step": 1396
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7155386832762892e-05,
+ "loss": 0.3205,
+ "step": 1397
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.715103187843048e-05,
+ "loss": 0.945,
+ "step": 1398
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7146674146719688e-05,
+ "loss": 0.9108,
+ "step": 1399
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7142313639323012e-05,
+ "loss": 0.8483,
+ "step": 1400
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7137950357934017e-05,
+ "loss": 0.9048,
+ "step": 1401
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7133584304247354e-05,
+ "loss": 0.8964,
+ "step": 1402
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7129215479958747e-05,
+ "loss": 0.8927,
+ "step": 1403
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7124843886765e-05,
+ "loss": 0.898,
+ "step": 1404
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.712046952636398e-05,
+ "loss": 0.8874,
+ "step": 1405
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7116092400454655e-05,
+ "loss": 0.8855,
+ "step": 1406
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7111712510737035e-05,
+ "loss": 0.8747,
+ "step": 1407
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7107329858912226e-05,
+ "loss": 0.8969,
+ "step": 1408
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7102944446682393e-05,
+ "loss": 0.9312,
+ "step": 1409
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.709855627575079e-05,
+ "loss": 0.9206,
+ "step": 1410
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7094165347821724e-05,
+ "loss": 0.9285,
+ "step": 1411
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7089771664600584e-05,
+ "loss": 0.872,
+ "step": 1412
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.708537522779382e-05,
+ "loss": 0.9077,
+ "step": 1413
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7080976039108964e-05,
+ "loss": 0.3426,
+ "step": 1414
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7076574100254614e-05,
+ "loss": 0.9247,
+ "step": 1415
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.707216941294042e-05,
+ "loss": 0.8963,
+ "step": 1416
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.706776197887712e-05,
+ "loss": 0.8499,
+ "step": 1417
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7063351799776514e-05,
+ "loss": 0.9228,
+ "step": 1418
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7058938877351456e-05,
+ "loss": 0.887,
+ "step": 1419
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.705452321331588e-05,
+ "loss": 0.8649,
+ "step": 1420
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7050104809384774e-05,
+ "loss": 0.302,
+ "step": 1421
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.70456836672742e-05,
+ "loss": 0.876,
+ "step": 1422
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.704125978870128e-05,
+ "loss": 0.8863,
+ "step": 1423
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7036833175384192e-05,
+ "loss": 0.8995,
+ "step": 1424
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7032403829042182e-05,
+ "loss": 0.3592,
+ "step": 1425
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7027971751395563e-05,
+ "loss": 0.3541,
+ "step": 1426
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7023536944165697e-05,
+ "loss": 0.9257,
+ "step": 1427
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7019099409075014e-05,
+ "loss": 0.8985,
+ "step": 1428
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7014659147847005e-05,
+ "loss": 0.9004,
+ "step": 1429
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.701021616220621e-05,
+ "loss": 0.9354,
+ "step": 1430
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7005770453878234e-05,
+ "loss": 0.978,
+ "step": 1431
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7001322024589742e-05,
+ "loss": 0.9114,
+ "step": 1432
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6996870876068455e-05,
+ "loss": 0.9199,
+ "step": 1433
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6992417010043144e-05,
+ "loss": 0.8406,
+ "step": 1434
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6987960428243637e-05,
+ "loss": 0.8679,
+ "step": 1435
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6983501132400825e-05,
+ "loss": 0.9248,
+ "step": 1436
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6979039124246643e-05,
+ "loss": 1.0086,
+ "step": 1437
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6974574405514083e-05,
+ "loss": 0.3541,
+ "step": 1438
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6970106977937192e-05,
+ "loss": 0.9326,
+ "step": 1439
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.696563684325107e-05,
+ "loss": 0.3749,
+ "step": 1440
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6961164003191862e-05,
+ "loss": 0.9212,
+ "step": 1441
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6956688459496767e-05,
+ "loss": 0.9714,
+ "step": 1442
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.695221021390404e-05,
+ "loss": 0.8775,
+ "step": 1443
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6947729268152972e-05,
+ "loss": 0.9413,
+ "step": 1444
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6943245623983918e-05,
+ "loss": 0.97,
+ "step": 1445
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6938759283138268e-05,
+ "loss": 0.8966,
+ "step": 1446
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.693427024735847e-05,
+ "loss": 0.9482,
+ "step": 1447
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.692977851838801e-05,
+ "loss": 0.9045,
+ "step": 1448
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6925284097971427e-05,
+ "loss": 0.9114,
+ "step": 1449
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6920786987854296e-05,
+ "loss": 0.9462,
+ "step": 1450
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.691628718978325e-05,
+ "loss": 0.9369,
+ "step": 1451
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.691178470550596e-05,
+ "loss": 0.9344,
+ "step": 1452
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6907279536771127e-05,
+ "loss": 0.9304,
+ "step": 1453
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6902771685328524e-05,
+ "loss": 0.8722,
+ "step": 1454
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6898261152928933e-05,
+ "loss": 0.9627,
+ "step": 1455
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6893747941324197e-05,
+ "loss": 0.9426,
+ "step": 1456
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6889232052267203e-05,
+ "loss": 0.883,
+ "step": 1457
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.688471348751186e-05,
+ "loss": 0.8001,
+ "step": 1458
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.688019224881313e-05,
+ "loss": 0.9631,
+ "step": 1459
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6875668337927014e-05,
+ "loss": 0.8921,
+ "step": 1460
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6871141756610544e-05,
+ "loss": 0.8152,
+ "step": 1461
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6866612506621788e-05,
+ "loss": 0.8897,
+ "step": 1462
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6862080589719863e-05,
+ "loss": 0.8766,
+ "step": 1463
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6857546007664908e-05,
+ "loss": 0.9122,
+ "step": 1464
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6853008762218103e-05,
+ "loss": 0.9693,
+ "step": 1465
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.684846885514166e-05,
+ "loss": 0.9323,
+ "step": 1466
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6843926288198828e-05,
+ "loss": 0.3472,
+ "step": 1467
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.683938106315389e-05,
+ "loss": 0.8746,
+ "step": 1468
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.683483318177216e-05,
+ "loss": 0.3501,
+ "step": 1469
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6830282645819974e-05,
+ "loss": 0.9011,
+ "step": 1470
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6825729457064718e-05,
+ "loss": 0.9537,
+ "step": 1471
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6821173617274793e-05,
+ "loss": 0.8521,
+ "step": 1472
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6816615128219635e-05,
+ "loss": 0.8628,
+ "step": 1473
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.681205399166971e-05,
+ "loss": 0.876,
+ "step": 1474
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6807490209396506e-05,
+ "loss": 0.8959,
+ "step": 1475
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6802923783172553e-05,
+ "loss": 0.9094,
+ "step": 1476
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.679835471477139e-05,
+ "loss": 0.8905,
+ "step": 1477
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6793783005967593e-05,
+ "loss": 0.9005,
+ "step": 1478
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.678920865853676e-05,
+ "loss": 0.3728,
+ "step": 1479
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.678463167425552e-05,
+ "loss": 0.9065,
+ "step": 1480
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6780052054901512e-05,
+ "loss": 0.8991,
+ "step": 1481
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6775469802253416e-05,
+ "loss": 0.8868,
+ "step": 1482
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6770884918090923e-05,
+ "loss": 0.8659,
+ "step": 1483
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6766297404194745e-05,
+ "loss": 0.9002,
+ "step": 1484
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6761707262346624e-05,
+ "loss": 0.8773,
+ "step": 1485
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.675711449432932e-05,
+ "loss": 0.9197,
+ "step": 1486
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6752519101926606e-05,
+ "loss": 0.8743,
+ "step": 1487
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6747921086923284e-05,
+ "loss": 0.8821,
+ "step": 1488
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.674332045110517e-05,
+ "loss": 0.9071,
+ "step": 1489
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6738717196259092e-05,
+ "loss": 0.9039,
+ "step": 1490
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.673411132417291e-05,
+ "loss": 0.9037,
+ "step": 1491
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.672950283663548e-05,
+ "loss": 0.9522,
+ "step": 1492
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6724891735436697e-05,
+ "loss": 0.9196,
+ "step": 1493
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6720278022367453e-05,
+ "loss": 0.8583,
+ "step": 1494
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6715661699219664e-05,
+ "loss": 0.9125,
+ "step": 1495
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6711042767786257e-05,
+ "loss": 0.8885,
+ "step": 1496
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6706421229861168e-05,
+ "loss": 0.8732,
+ "step": 1497
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6701797087239354e-05,
+ "loss": 0.351,
+ "step": 1498
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6697170341716772e-05,
+ "loss": 0.8356,
+ "step": 1499
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6692540995090403e-05,
+ "loss": 0.9195,
+ "step": 1500
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.668790904915823e-05,
+ "loss": 0.8917,
+ "step": 1501
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6683274505719248e-05,
+ "loss": 0.8966,
+ "step": 1502
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6678637366573455e-05,
+ "loss": 0.9046,
+ "step": 1503
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.667399763352187e-05,
+ "loss": 0.9207,
+ "step": 1504
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.666935530836651e-05,
+ "loss": 0.9575,
+ "step": 1505
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6664710392910396e-05,
+ "loss": 0.815,
+ "step": 1506
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6660062888957564e-05,
+ "loss": 0.9449,
+ "step": 1507
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.665541279831305e-05,
+ "loss": 0.8779,
+ "step": 1508
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6650760122782898e-05,
+ "loss": 0.8118,
+ "step": 1509
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6646104864174147e-05,
+ "loss": 0.9139,
+ "step": 1510
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.664144702429485e-05,
+ "loss": 0.9026,
+ "step": 1511
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.663678660495406e-05,
+ "loss": 0.8231,
+ "step": 1512
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.663212360796183e-05,
+ "loss": 0.926,
+ "step": 1513
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.662745803512921e-05,
+ "loss": 0.9112,
+ "step": 1514
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.662278988826826e-05,
+ "loss": 0.9311,
+ "step": 1515
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6618119169192027e-05,
+ "loss": 0.9262,
+ "step": 1516
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.661344587971457e-05,
+ "loss": 0.985,
+ "step": 1517
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6608770021650945e-05,
+ "loss": 0.9365,
+ "step": 1518
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6604091596817193e-05,
+ "loss": 0.904,
+ "step": 1519
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6599410607030363e-05,
+ "loss": 0.9535,
+ "step": 1520
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6594727054108498e-05,
+ "loss": 0.9517,
+ "step": 1521
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.659004093987064e-05,
+ "loss": 0.9498,
+ "step": 1522
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6585352266136814e-05,
+ "loss": 0.9883,
+ "step": 1523
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6580661034728055e-05,
+ "loss": 0.8979,
+ "step": 1524
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6575967247466376e-05,
+ "loss": 0.8531,
+ "step": 1525
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.657127090617479e-05,
+ "loss": 0.8912,
+ "step": 1526
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.656657201267731e-05,
+ "loss": 0.9086,
+ "step": 1527
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6561870568798927e-05,
+ "loss": 0.9344,
+ "step": 1528
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.655716657636562e-05,
+ "loss": 0.8746,
+ "step": 1529
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6552460037204382e-05,
+ "loss": 0.9204,
+ "step": 1530
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6547750953143168e-05,
+ "loss": 0.8635,
+ "step": 1531
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.654303932601093e-05,
+ "loss": 0.921,
+ "step": 1532
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6538325157637614e-05,
+ "loss": 0.9079,
+ "step": 1533
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.653360844985415e-05,
+ "loss": 0.3378,
+ "step": 1534
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.652888920449245e-05,
+ "loss": 0.9322,
+ "step": 1535
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6524167423385414e-05,
+ "loss": 0.9674,
+ "step": 1536
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.651944310836693e-05,
+ "loss": 0.9316,
+ "step": 1537
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6514716261271866e-05,
+ "loss": 0.8693,
+ "step": 1538
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6509986883936073e-05,
+ "loss": 0.8552,
+ "step": 1539
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.650525497819639e-05,
+ "loss": 0.3403,
+ "step": 1540
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6500520545890634e-05,
+ "loss": 0.9273,
+ "step": 1541
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6495783588857605e-05,
+ "loss": 0.9329,
+ "step": 1542
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.649104410893708e-05,
+ "loss": 0.334,
+ "step": 1543
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.648630210796982e-05,
+ "loss": 0.8976,
+ "step": 1544
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6481557587797562e-05,
+ "loss": 0.9389,
+ "step": 1545
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6476810550263023e-05,
+ "loss": 0.9542,
+ "step": 1546
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6472060997209898e-05,
+ "loss": 0.8812,
+ "step": 1547
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6467308930482863e-05,
+ "loss": 0.9048,
+ "step": 1548
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6462554351927558e-05,
+ "loss": 0.876,
+ "step": 1549
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6457797263390613e-05,
+ "loss": 0.8298,
+ "step": 1550
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6453037666719624e-05,
+ "loss": 0.9022,
+ "step": 1551
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6448275563763162e-05,
+ "loss": 0.8829,
+ "step": 1552
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.644351095637078e-05,
+ "loss": 0.8775,
+ "step": 1553
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6438743846392987e-05,
+ "loss": 0.9378,
+ "step": 1554
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6433974235681274e-05,
+ "loss": 0.9564,
+ "step": 1555
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6429202126088112e-05,
+ "loss": 0.8495,
+ "step": 1556
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6424427519466925e-05,
+ "loss": 0.9205,
+ "step": 1557
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.641965041767212e-05,
+ "loss": 0.8454,
+ "step": 1558
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6414870822559064e-05,
+ "loss": 0.8951,
+ "step": 1559
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6410088735984103e-05,
+ "loss": 0.9077,
+ "step": 1560
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6405304159804534e-05,
+ "loss": 0.931,
+ "step": 1561
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6400517095878644e-05,
+ "loss": 0.9085,
+ "step": 1562
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6395727546065665e-05,
+ "loss": 0.9335,
+ "step": 1563
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6390935512225806e-05,
+ "loss": 0.9383,
+ "step": 1564
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6386140996220232e-05,
+ "loss": 0.9486,
+ "step": 1565
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6381343999911088e-05,
+ "loss": 0.9645,
+ "step": 1566
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6376544525161463e-05,
+ "loss": 0.942,
+ "step": 1567
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6371742573835426e-05,
+ "loss": 0.8543,
+ "step": 1568
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.636693814779799e-05,
+ "loss": 0.9479,
+ "step": 1569
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6362131248915145e-05,
+ "loss": 0.8565,
+ "step": 1570
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6357321879053833e-05,
+ "loss": 0.9377,
+ "step": 1571
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6352510040081962e-05,
+ "loss": 0.9424,
+ "step": 1572
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.634769573386839e-05,
+ "loss": 0.8644,
+ "step": 1573
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.634287896228294e-05,
+ "loss": 0.8406,
+ "step": 1574
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6338059727196386e-05,
+ "loss": 0.9085,
+ "step": 1575
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6333238030480473e-05,
+ "loss": 0.9058,
+ "step": 1576
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6328413874007884e-05,
+ "loss": 0.942,
+ "step": 1577
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6323587259652267e-05,
+ "loss": 0.8775,
+ "step": 1578
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6318758189288227e-05,
+ "loss": 0.9459,
+ "step": 1579
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6313926664791316e-05,
+ "loss": 0.9458,
+ "step": 1580
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6309092688038047e-05,
+ "loss": 0.9369,
+ "step": 1581
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6304256260905872e-05,
+ "loss": 0.917,
+ "step": 1582
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6299417385273216e-05,
+ "loss": 0.9081,
+ "step": 1583
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.629457606301943e-05,
+ "loss": 0.3765,
+ "step": 1584
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6289732296024837e-05,
+ "loss": 0.9892,
+ "step": 1585
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6284886086170697e-05,
+ "loss": 0.9082,
+ "step": 1586
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.628003743533922e-05,
+ "loss": 0.3439,
+ "step": 1587
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6275186345413566e-05,
+ "loss": 0.9259,
+ "step": 1588
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.627033281827785e-05,
+ "loss": 0.362,
+ "step": 1589
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6265476855817116e-05,
+ "loss": 0.8515,
+ "step": 1590
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6260618459917366e-05,
+ "loss": 0.9138,
+ "step": 1591
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6255757632465553e-05,
+ "loss": 0.9615,
+ "step": 1592
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.625089437534956e-05,
+ "loss": 0.9091,
+ "step": 1593
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.624602869045822e-05,
+ "loss": 0.9077,
+ "step": 1594
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.624116057968131e-05,
+ "loss": 0.9218,
+ "step": 1595
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6236290044909543e-05,
+ "loss": 1.0128,
+ "step": 1596
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6231417088034585e-05,
+ "loss": 0.9007,
+ "step": 1597
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.622654171094904e-05,
+ "loss": 0.3385,
+ "step": 1598
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6221663915546437e-05,
+ "loss": 0.9356,
+ "step": 1599
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6216783703721265e-05,
+ "loss": 0.9317,
+ "step": 1600
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6211901077368937e-05,
+ "loss": 0.8909,
+ "step": 1601
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.620701603838581e-05,
+ "loss": 0.9236,
+ "step": 1602
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6202128588669177e-05,
+ "loss": 0.8958,
+ "step": 1603
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.619723873011727e-05,
+ "loss": 0.866,
+ "step": 1604
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6192346464629247e-05,
+ "loss": 0.8925,
+ "step": 1605
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6187451794105212e-05,
+ "loss": 0.851,
+ "step": 1606
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.61825547204462e-05,
+ "loss": 0.879,
+ "step": 1607
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6177655245554177e-05,
+ "loss": 0.8873,
+ "step": 1608
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.617275337133204e-05,
+ "loss": 0.8709,
+ "step": 1609
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6167849099683623e-05,
+ "loss": 0.8851,
+ "step": 1610
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6162942432513687e-05,
+ "loss": 0.9548,
+ "step": 1611
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6158033371727924e-05,
+ "loss": 0.9119,
+ "step": 1612
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6153121919232962e-05,
+ "loss": 0.8921,
+ "step": 1613
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.614820807693635e-05,
+ "loss": 0.9396,
+ "step": 1614
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6143291846746563e-05,
+ "loss": 0.9238,
+ "step": 1615
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.613837323057301e-05,
+ "loss": 0.993,
+ "step": 1616
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6133452230326035e-05,
+ "loss": 0.8919,
+ "step": 1617
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6128528847916883e-05,
+ "loss": 0.8905,
+ "step": 1618
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6123603085257746e-05,
+ "loss": 0.9203,
+ "step": 1619
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6118674944261732e-05,
+ "loss": 0.8348,
+ "step": 1620
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6113744426842882e-05,
+ "loss": 0.8634,
+ "step": 1621
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6108811534916137e-05,
+ "loss": 0.9083,
+ "step": 1622
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6103876270397387e-05,
+ "loss": 0.9459,
+ "step": 1623
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.609893863520343e-05,
+ "loss": 0.8999,
+ "step": 1624
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.609399863125198e-05,
+ "loss": 0.9203,
+ "step": 1625
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6089056260461687e-05,
+ "loss": 0.8976,
+ "step": 1626
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6084111524752107e-05,
+ "loss": 0.888,
+ "step": 1627
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.607916442604372e-05,
+ "loss": 0.9126,
+ "step": 1628
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6074214966257914e-05,
+ "loss": 0.9084,
+ "step": 1629
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6069263147317015e-05,
+ "loss": 0.856,
+ "step": 1630
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6064308971144236e-05,
+ "loss": 0.9364,
+ "step": 1631
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.605935243966374e-05,
+ "loss": 0.8824,
+ "step": 1632
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6054393554800574e-05,
+ "loss": 0.8943,
+ "step": 1633
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.604943231848072e-05,
+ "loss": 0.9126,
+ "step": 1634
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.604446873263106e-05,
+ "loss": 0.9869,
+ "step": 1635
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6039502799179394e-05,
+ "loss": 0.9221,
+ "step": 1636
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6034534520054435e-05,
+ "loss": 0.9395,
+ "step": 1637
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.60295638971858e-05,
+ "loss": 0.9287,
+ "step": 1638
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.602459093250403e-05,
+ "loss": 0.946,
+ "step": 1639
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.601961562794056e-05,
+ "loss": 0.8648,
+ "step": 1640
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.601463798542775e-05,
+ "loss": 0.3325,
+ "step": 1641
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6009658006898848e-05,
+ "loss": 0.8383,
+ "step": 1642
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.600467569428803e-05,
+ "loss": 0.85,
+ "step": 1643
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.599969104953036e-05,
+ "loss": 0.9851,
+ "step": 1644
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.599470407456182e-05,
+ "loss": 0.8856,
+ "step": 1645
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5989714771319297e-05,
+ "loss": 0.8561,
+ "step": 1646
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5984723141740578e-05,
+ "loss": 0.91,
+ "step": 1647
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.597972918776435e-05,
+ "loss": 0.9533,
+ "step": 1648
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5974732911330208e-05,
+ "loss": 0.9079,
+ "step": 1649
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5969734314378654e-05,
+ "loss": 0.8686,
+ "step": 1650
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5964733398851078e-05,
+ "loss": 0.9388,
+ "step": 1651
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5959730166689783e-05,
+ "loss": 0.9022,
+ "step": 1652
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5954724619837966e-05,
+ "loss": 0.869,
+ "step": 1653
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5949716760239722e-05,
+ "loss": 0.9018,
+ "step": 1654
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5944706589840046e-05,
+ "loss": 0.8919,
+ "step": 1655
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5939694110584833e-05,
+ "loss": 0.9298,
+ "step": 1656
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.593467932442087e-05,
+ "loss": 0.8993,
+ "step": 1657
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5929662233295846e-05,
+ "loss": 0.8597,
+ "step": 1658
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5924642839158334e-05,
+ "loss": 0.9543,
+ "step": 1659
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.591962114395781e-05,
+ "loss": 0.8902,
+ "step": 1660
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5914597149644654e-05,
+ "loss": 0.9517,
+ "step": 1661
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5909570858170115e-05,
+ "loss": 0.8964,
+ "step": 1662
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5904542271486346e-05,
+ "loss": 0.8226,
+ "step": 1663
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5899511391546403e-05,
+ "loss": 0.9308,
+ "step": 1664
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5894478220304215e-05,
+ "loss": 0.8725,
+ "step": 1665
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5889442759714603e-05,
+ "loss": 0.357,
+ "step": 1666
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5884405011733294e-05,
+ "loss": 0.8884,
+ "step": 1667
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.587936497831688e-05,
+ "loss": 0.8428,
+ "step": 1668
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5874322661422856e-05,
+ "loss": 0.8942,
+ "step": 1669
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5869278063009602e-05,
+ "loss": 0.9476,
+ "step": 1670
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.586423118503638e-05,
+ "loss": 0.8257,
+ "step": 1671
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.585918202946334e-05,
+ "loss": 0.9185,
+ "step": 1672
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5854130598251514e-05,
+ "loss": 0.9394,
+ "step": 1673
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5849076893362822e-05,
+ "loss": 0.9399,
+ "step": 1674
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.584402091676006e-05,
+ "loss": 0.9126,
+ "step": 1675
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5838962670406918e-05,
+ "loss": 0.9149,
+ "step": 1676
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5833902156267956e-05,
+ "loss": 0.938,
+ "step": 1677
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.582883937630862e-05,
+ "loss": 0.9592,
+ "step": 1678
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5823774332495236e-05,
+ "loss": 0.9069,
+ "step": 1679
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581870702679501e-05,
+ "loss": 0.918,
+ "step": 1680
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581363746117602e-05,
+ "loss": 0.8892,
+ "step": 1681
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.580856563760724e-05,
+ "loss": 0.349,
+ "step": 1682
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5803491558058486e-05,
+ "loss": 0.9282,
+ "step": 1683
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.579841522450049e-05,
+ "loss": 0.8366,
+ "step": 1684
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5793336638904838e-05,
+ "loss": 0.9711,
+ "step": 1685
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.578825580324399e-05,
+ "loss": 0.9039,
+ "step": 1686
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5783172719491288e-05,
+ "loss": 0.8891,
+ "step": 1687
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.577808738962094e-05,
+ "loss": 0.843,
+ "step": 1688
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.577299981560803e-05,
+ "loss": 0.3497,
+ "step": 1689
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5767909999428513e-05,
+ "loss": 0.9345,
+ "step": 1690
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.576281794305922e-05,
+ "loss": 0.93,
+ "step": 1691
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.575772364847784e-05,
+ "loss": 0.9059,
+ "step": 1692
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.575262711766294e-05,
+ "loss": 0.8892,
+ "step": 1693
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5747528352593956e-05,
+ "loss": 0.8448,
+ "step": 1694
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.574242735525119e-05,
+ "loss": 0.8748,
+ "step": 1695
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5737324127615808e-05,
+ "loss": 0.9469,
+ "step": 1696
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5732218671669847e-05,
+ "loss": 0.9469,
+ "step": 1697
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5727110989396205e-05,
+ "loss": 0.8397,
+ "step": 1698
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5722001082778645e-05,
+ "loss": 0.982,
+ "step": 1699
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5716888953801805e-05,
+ "loss": 0.9181,
+ "step": 1700
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5711774604451168e-05,
+ "loss": 0.85,
+ "step": 1701
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5706658036713093e-05,
+ "loss": 0.3376,
+ "step": 1702
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5701539252574795e-05,
+ "loss": 0.362,
+ "step": 1703
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5696418254024344e-05,
+ "loss": 0.8803,
+ "step": 1704
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.569129504305069e-05,
+ "loss": 0.904,
+ "step": 1705
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.568616962164362e-05,
+ "loss": 0.8772,
+ "step": 1706
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5681041991793788e-05,
+ "loss": 0.8893,
+ "step": 1707
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.567591215549271e-05,
+ "loss": 0.9031,
+ "step": 1708
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.567078011473276e-05,
+ "loss": 0.868,
+ "step": 1709
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5665645871507152e-05,
+ "loss": 0.841,
+ "step": 1710
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5660509427809973e-05,
+ "loss": 0.8552,
+ "step": 1711
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.565537078563616e-05,
+ "loss": 0.8433,
+ "step": 1712
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.56502299469815e-05,
+ "loss": 0.8928,
+ "step": 1713
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.564508691384264e-05,
+ "loss": 0.9244,
+ "step": 1714
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5639941688217063e-05,
+ "loss": 0.8972,
+ "step": 1715
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5634794272103126e-05,
+ "loss": 0.8691,
+ "step": 1716
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.562964466750003e-05,
+ "loss": 0.898,
+ "step": 1717
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.562449287640781e-05,
+ "loss": 0.9465,
+ "step": 1718
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5619338900827368e-05,
+ "loss": 0.8966,
+ "step": 1719
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5614182742760448e-05,
+ "loss": 0.9137,
+ "step": 1720
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5609024404209643e-05,
+ "loss": 0.9303,
+ "step": 1721
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5603863887178393e-05,
+ "loss": 0.8798,
+ "step": 1722
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5598701193670983e-05,
+ "loss": 0.9252,
+ "step": 1723
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.559353632569254e-05,
+ "loss": 0.8918,
+ "step": 1724
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5588369285249048e-05,
+ "loss": 0.8562,
+ "step": 1725
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5583200074347318e-05,
+ "loss": 0.9207,
+ "step": 1726
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.557802869499501e-05,
+ "loss": 0.8755,
+ "step": 1727
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5572855149200637e-05,
+ "loss": 0.9165,
+ "step": 1728
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5567679438973543e-05,
+ "loss": 0.9501,
+ "step": 1729
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5562501566323906e-05,
+ "loss": 0.9016,
+ "step": 1730
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.555732153326276e-05,
+ "loss": 0.9402,
+ "step": 1731
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5552139341801965e-05,
+ "loss": 0.8856,
+ "step": 1732
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.554695499395423e-05,
+ "loss": 0.8768,
+ "step": 1733
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5541768491733092e-05,
+ "loss": 0.9014,
+ "step": 1734
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5536579837152927e-05,
+ "loss": 0.8418,
+ "step": 1735
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5531389032228955e-05,
+ "loss": 0.8946,
+ "step": 1736
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.552619607897722e-05,
+ "loss": 0.9021,
+ "step": 1737
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.55210009794146e-05,
+ "loss": 0.8611,
+ "step": 1738
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5515803735558827e-05,
+ "loss": 0.9054,
+ "step": 1739
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5510604349428438e-05,
+ "loss": 0.9597,
+ "step": 1740
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.550540282304282e-05,
+ "loss": 0.8706,
+ "step": 1741
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.550019915842218e-05,
+ "loss": 0.9222,
+ "step": 1742
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.549499335758757e-05,
+ "loss": 0.9049,
+ "step": 1743
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.548978542256086e-05,
+ "loss": 0.9216,
+ "step": 1744
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5484575355364744e-05,
+ "loss": 0.906,
+ "step": 1745
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5479363158022763e-05,
+ "loss": 0.8817,
+ "step": 1746
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.547414883255927e-05,
+ "loss": 0.8898,
+ "step": 1747
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.546893238099945e-05,
+ "loss": 0.8456,
+ "step": 1748
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5463713805369312e-05,
+ "loss": 0.8614,
+ "step": 1749
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5458493107695688e-05,
+ "loss": 0.8622,
+ "step": 1750
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5453270290006237e-05,
+ "loss": 0.8739,
+ "step": 1751
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.544804535432945e-05,
+ "loss": 0.9129,
+ "step": 1752
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.544281830269462e-05,
+ "loss": 0.8913,
+ "step": 1753
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5437589137131882e-05,
+ "loss": 0.344,
+ "step": 1754
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5432357859672177e-05,
+ "loss": 0.3343,
+ "step": 1755
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.542712447234728e-05,
+ "loss": 0.9442,
+ "step": 1756
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.542188897718977e-05,
+ "loss": 0.8802,
+ "step": 1757
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5416651376233062e-05,
+ "loss": 0.9224,
+ "step": 1758
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5411411671511376e-05,
+ "loss": 0.3322,
+ "step": 1759
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5406169865059747e-05,
+ "loss": 0.8669,
+ "step": 1760
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5400925958914045e-05,
+ "loss": 0.8587,
+ "step": 1761
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5395679955110927e-05,
+ "loss": 0.9005,
+ "step": 1762
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.53904318556879e-05,
+ "loss": 0.894,
+ "step": 1763
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5385181662683244e-05,
+ "loss": 0.9441,
+ "step": 1764
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5379929378136088e-05,
+ "loss": 0.8866,
+ "step": 1765
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5374675004086353e-05,
+ "loss": 0.8596,
+ "step": 1766
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5369418542574782e-05,
+ "loss": 0.9463,
+ "step": 1767
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.536415999564292e-05,
+ "loss": 0.9096,
+ "step": 1768
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5358899365333123e-05,
+ "loss": 0.9065,
+ "step": 1769
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5353636653688563e-05,
+ "loss": 0.9263,
+ "step": 1770
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.534837186275322e-05,
+ "loss": 0.9296,
+ "step": 1771
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5343104994571877e-05,
+ "loss": 0.9103,
+ "step": 1772
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.533783605119012e-05,
+ "loss": 0.9263,
+ "step": 1773
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5332565034654344e-05,
+ "loss": 0.8902,
+ "step": 1774
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5327291947011763e-05,
+ "loss": 0.8487,
+ "step": 1775
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5322016790310373e-05,
+ "loss": 0.9495,
+ "step": 1776
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5316739566598985e-05,
+ "loss": 0.8295,
+ "step": 1777
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.531146027792722e-05,
+ "loss": 0.8741,
+ "step": 1778
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.530617892634548e-05,
+ "loss": 0.879,
+ "step": 1779
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5300895513904993e-05,
+ "loss": 0.8716,
+ "step": 1780
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.529561004265777e-05,
+ "loss": 0.3501,
+ "step": 1781
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5290322514656624e-05,
+ "loss": 0.9305,
+ "step": 1782
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5285032931955177e-05,
+ "loss": 0.8902,
+ "step": 1783
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.527974129660784e-05,
+ "loss": 0.8797,
+ "step": 1784
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.527444761066982e-05,
+ "loss": 0.9201,
+ "step": 1785
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5269151876197127e-05,
+ "loss": 0.8549,
+ "step": 1786
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5263854095246557e-05,
+ "loss": 0.9351,
+ "step": 1787
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5258554269875716e-05,
+ "loss": 0.9333,
+ "step": 1788
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5253252402142989e-05,
+ "loss": 0.914,
+ "step": 1789
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5247948494107566e-05,
+ "loss": 0.9352,
+ "step": 1790
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5242642547829416e-05,
+ "loss": 0.9375,
+ "step": 1791
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.523733456536931e-05,
+ "loss": 0.8714,
+ "step": 1792
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5232024548788813e-05,
+ "loss": 0.9665,
+ "step": 1793
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5226712500150267e-05,
+ "loss": 0.8637,
+ "step": 1794
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5221398421516816e-05,
+ "loss": 0.9001,
+ "step": 1795
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5216082314952383e-05,
+ "loss": 0.8326,
+ "step": 1796
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.521076418252168e-05,
+ "loss": 0.9145,
+ "step": 1797
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5205444026290218e-05,
+ "loss": 0.9409,
+ "step": 1798
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5200121848324276e-05,
+ "loss": 0.899,
+ "step": 1799
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5194797650690926e-05,
+ "loss": 0.893,
+ "step": 1800
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5189471435458032e-05,
+ "loss": 0.9221,
+ "step": 1801
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5184143204694231e-05,
+ "loss": 0.7968,
+ "step": 1802
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5178812960468945e-05,
+ "loss": 0.9386,
+ "step": 1803
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5173480704852379e-05,
+ "loss": 0.8234,
+ "step": 1804
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5168146439915525e-05,
+ "loss": 0.8813,
+ "step": 1805
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5162810167730144e-05,
+ "loss": 0.8989,
+ "step": 1806
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5157471890368785e-05,
+ "loss": 0.9244,
+ "step": 1807
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5152131609904773e-05,
+ "loss": 0.9251,
+ "step": 1808
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5146789328412213e-05,
+ "loss": 0.8993,
+ "step": 1809
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5141445047965984e-05,
+ "loss": 0.342,
+ "step": 1810
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5136098770641741e-05,
+ "loss": 0.9025,
+ "step": 1811
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.513075049851592e-05,
+ "loss": 0.3408,
+ "step": 1812
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5125400233665728e-05,
+ "loss": 0.8834,
+ "step": 1813
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5120047978169146e-05,
+ "loss": 0.8897,
+ "step": 1814
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5114693734104926e-05,
+ "loss": 0.8895,
+ "step": 1815
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5109337503552594e-05,
+ "loss": 0.9055,
+ "step": 1816
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5103979288592454e-05,
+ "loss": 0.8434,
+ "step": 1817
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5098619091305571e-05,
+ "loss": 0.9234,
+ "step": 1818
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5093256913773786e-05,
+ "loss": 0.7853,
+ "step": 1819
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.50878927580797e-05,
+ "loss": 0.9126,
+ "step": 1820
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5082526626306698e-05,
+ "loss": 0.3308,
+ "step": 1821
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5077158520538921e-05,
+ "loss": 0.821,
+ "step": 1822
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5071788442861277e-05,
+ "loss": 0.8598,
+ "step": 1823
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5066416395359444e-05,
+ "loss": 0.8984,
+ "step": 1824
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5061042380119864e-05,
+ "loss": 0.8945,
+ "step": 1825
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5055666399229743e-05,
+ "loss": 0.9365,
+ "step": 1826
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5050288454777047e-05,
+ "loss": 0.3325,
+ "step": 1827
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.504490854885051e-05,
+ "loss": 0.9344,
+ "step": 1828
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5039526683539627e-05,
+ "loss": 0.961,
+ "step": 1829
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5034142860934649e-05,
+ "loss": 0.8653,
+ "step": 1830
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5028757083126594e-05,
+ "loss": 0.8737,
+ "step": 1831
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5023369352207229e-05,
+ "loss": 0.8489,
+ "step": 1832
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5017979670269096e-05,
+ "loss": 0.8895,
+ "step": 1833
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.501258803940548e-05,
+ "loss": 0.9791,
+ "step": 1834
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.500719446171043e-05,
+ "loss": 0.853,
+ "step": 1835
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.500179893927875e-05,
+ "loss": 0.8926,
+ "step": 1836
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4996401474205997e-05,
+ "loss": 0.3301,
+ "step": 1837
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4991002068588484e-05,
+ "loss": 0.9411,
+ "step": 1838
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4985600724523282e-05,
+ "loss": 0.9024,
+ "step": 1839
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4980197444108205e-05,
+ "loss": 0.9483,
+ "step": 1840
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4974792229441826e-05,
+ "loss": 0.9167,
+ "step": 1841
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4969385082623473e-05,
+ "loss": 0.9055,
+ "step": 1842
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4963976005753216e-05,
+ "loss": 0.8377,
+ "step": 1843
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4958565000931877e-05,
+ "loss": 0.9333,
+ "step": 1844
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4953152070261027e-05,
+ "loss": 0.8977,
+ "step": 1845
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.494773721584299e-05,
+ "loss": 0.9427,
+ "step": 1846
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4942320439780833e-05,
+ "loss": 0.8719,
+ "step": 1847
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4936901744178367e-05,
+ "loss": 0.8691,
+ "step": 1848
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4931481131140149e-05,
+ "loss": 0.8912,
+ "step": 1849
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4926058602771484e-05,
+ "loss": 0.356,
+ "step": 1850
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4920634161178424e-05,
+ "loss": 0.8958,
+ "step": 1851
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4915207808467756e-05,
+ "loss": 0.8454,
+ "step": 1852
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4909779546747011e-05,
+ "loss": 0.9246,
+ "step": 1853
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4904349378124467e-05,
+ "loss": 0.8342,
+ "step": 1854
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.489891730470914e-05,
+ "loss": 0.913,
+ "step": 1855
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4893483328610778e-05,
+ "loss": 0.8311,
+ "step": 1856
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.488804745193988e-05,
+ "loss": 0.8267,
+ "step": 1857
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4882609676807675e-05,
+ "loss": 0.8444,
+ "step": 1858
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4877170005326136e-05,
+ "loss": 0.8312,
+ "step": 1859
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4871728439607967e-05,
+ "loss": 0.9051,
+ "step": 1860
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4866284981766607e-05,
+ "loss": 0.9088,
+ "step": 1861
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4860839633916236e-05,
+ "loss": 0.8904,
+ "step": 1862
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4855392398171762e-05,
+ "loss": 0.9397,
+ "step": 1863
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.484994327664883e-05,
+ "loss": 0.8327,
+ "step": 1864
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4844492271463814e-05,
+ "loss": 0.9035,
+ "step": 1865
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4839039384733821e-05,
+ "loss": 0.8804,
+ "step": 1866
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4833584618576695e-05,
+ "loss": 0.8932,
+ "step": 1867
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4828127975111e-05,
+ "loss": 0.9195,
+ "step": 1868
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4822669456456031e-05,
+ "loss": 0.8813,
+ "step": 1869
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4817209064731819e-05,
+ "loss": 0.9215,
+ "step": 1870
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4811746802059115e-05,
+ "loss": 0.9449,
+ "step": 1871
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.48062826705594e-05,
+ "loss": 0.8442,
+ "step": 1872
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4800816672354876e-05,
+ "loss": 0.8994,
+ "step": 1873
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4795348809568477e-05,
+ "loss": 0.8873,
+ "step": 1874
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4789879084323858e-05,
+ "loss": 0.8457,
+ "step": 1875
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4784407498745394e-05,
+ "loss": 0.9257,
+ "step": 1876
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.477893405495819e-05,
+ "loss": 0.9089,
+ "step": 1877
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4773458755088068e-05,
+ "loss": 0.9125,
+ "step": 1878
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4767981601261567e-05,
+ "loss": 0.9097,
+ "step": 1879
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4762502595605957e-05,
+ "loss": 0.877,
+ "step": 1880
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4757021740249213e-05,
+ "loss": 0.8929,
+ "step": 1881
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4751539037320044e-05,
+ "loss": 0.877,
+ "step": 1882
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4746054488947863e-05,
+ "loss": 0.9186,
+ "step": 1883
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4740568097262811e-05,
+ "loss": 0.9156,
+ "step": 1884
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.473507986439573e-05,
+ "loss": 0.8942,
+ "step": 1885
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4729589792478193e-05,
+ "loss": 0.9098,
+ "step": 1886
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4724097883642482e-05,
+ "loss": 0.9079,
+ "step": 1887
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4718604140021588e-05,
+ "loss": 0.8696,
+ "step": 1888
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.471310856374922e-05,
+ "loss": 0.8806,
+ "step": 1889
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.470761115695979e-05,
+ "loss": 0.8841,
+ "step": 1890
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4702111921788437e-05,
+ "loss": 0.3691,
+ "step": 1891
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4696610860370997e-05,
+ "loss": 0.8541,
+ "step": 1892
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4691107974844015e-05,
+ "loss": 0.8623,
+ "step": 1893
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.468560326734475e-05,
+ "loss": 0.9252,
+ "step": 1894
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4680096740011172e-05,
+ "loss": 0.9047,
+ "step": 1895
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4674588394981948e-05,
+ "loss": 0.8317,
+ "step": 1896
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4669078234396454e-05,
+ "loss": 0.8868,
+ "step": 1897
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4663566260394775e-05,
+ "loss": 0.9246,
+ "step": 1898
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4658052475117704e-05,
+ "loss": 0.9418,
+ "step": 1899
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4652536880706723e-05,
+ "loss": 0.977,
+ "step": 1900
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4647019479304028e-05,
+ "loss": 0.935,
+ "step": 1901
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4641500273052516e-05,
+ "loss": 0.881,
+ "step": 1902
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.463597926409578e-05,
+ "loss": 0.8564,
+ "step": 1903
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4630456454578122e-05,
+ "loss": 0.9335,
+ "step": 1904
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.462493184664453e-05,
+ "loss": 0.8867,
+ "step": 1905
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4619405442440702e-05,
+ "loss": 0.8895,
+ "step": 1906
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4613877244113033e-05,
+ "loss": 0.8933,
+ "step": 1907
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4608347253808605e-05,
+ "loss": 0.3472,
+ "step": 1908
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.460281547367521e-05,
+ "loss": 0.9395,
+ "step": 1909
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4597281905861318e-05,
+ "loss": 0.9227,
+ "step": 1910
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4591746552516109e-05,
+ "loss": 0.3281,
+ "step": 1911
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4586209415789452e-05,
+ "loss": 0.8451,
+ "step": 1912
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4580670497831904e-05,
+ "loss": 0.9553,
+ "step": 1913
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4575129800794718e-05,
+ "loss": 0.9058,
+ "step": 1914
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4569587326829834e-05,
+ "loss": 0.9239,
+ "step": 1915
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4564043078089891e-05,
+ "loss": 0.8449,
+ "step": 1916
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4558497056728205e-05,
+ "loss": 0.9244,
+ "step": 1917
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4552949264898795e-05,
+ "loss": 0.8445,
+ "step": 1918
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4547399704756348e-05,
+ "loss": 0.8401,
+ "step": 1919
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4541848378456255e-05,
+ "loss": 0.8877,
+ "step": 1920
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4536295288154594e-05,
+ "loss": 0.9163,
+ "step": 1921
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4530740436008111e-05,
+ "loss": 0.8836,
+ "step": 1922
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.452518382417425e-05,
+ "loss": 0.9343,
+ "step": 1923
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4519625454811135e-05,
+ "loss": 0.8714,
+ "step": 1924
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4514065330077575e-05,
+ "loss": 0.9157,
+ "step": 1925
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4508503452133053e-05,
+ "loss": 0.8121,
+ "step": 1926
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4502939823137744e-05,
+ "loss": 0.9604,
+ "step": 1927
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4497374445252496e-05,
+ "loss": 0.8782,
+ "step": 1928
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4491807320638835e-05,
+ "loss": 0.9134,
+ "step": 1929
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4486238451458972e-05,
+ "loss": 0.8633,
+ "step": 1930
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4480667839875786e-05,
+ "loss": 0.9408,
+ "step": 1931
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4475095488052843e-05,
+ "loss": 0.8734,
+ "step": 1932
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4469521398154381e-05,
+ "loss": 0.927,
+ "step": 1933
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4463945572345308e-05,
+ "loss": 0.898,
+ "step": 1934
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4458368012791213e-05,
+ "loss": 0.3371,
+ "step": 1935
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4452788721658355e-05,
+ "loss": 0.9782,
+ "step": 1936
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4447207701113669e-05,
+ "loss": 0.8966,
+ "step": 1937
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4441624953324755e-05,
+ "loss": 0.9522,
+ "step": 1938
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4436040480459891e-05,
+ "loss": 0.9068,
+ "step": 1939
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.443045428468802e-05,
+ "loss": 0.8675,
+ "step": 1940
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4424866368178761e-05,
+ "loss": 0.8399,
+ "step": 1941
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.441927673310239e-05,
+ "loss": 0.8841,
+ "step": 1942
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4413685381629855e-05,
+ "loss": 0.9024,
+ "step": 1943
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.440809231593278e-05,
+ "loss": 0.8847,
+ "step": 1944
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4402497538183444e-05,
+ "loss": 0.8777,
+ "step": 1945
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4396901050554794e-05,
+ "loss": 0.9327,
+ "step": 1946
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4391302855220442e-05,
+ "loss": 0.8859,
+ "step": 1947
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4385702954354662e-05,
+ "loss": 0.8798,
+ "step": 1948
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.438010135013239e-05,
+ "loss": 0.9352,
+ "step": 1949
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4374498044729225e-05,
+ "loss": 0.3367,
+ "step": 1950
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4368893040321428e-05,
+ "loss": 0.9483,
+ "step": 1951
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4363286339085915e-05,
+ "loss": 0.8883,
+ "step": 1952
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.435767794320027e-05,
+ "loss": 0.9052,
+ "step": 1953
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4352067854842724e-05,
+ "loss": 0.8696,
+ "step": 1954
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.434645607619217e-05,
+ "loss": 0.9393,
+ "step": 1955
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.434084260942816e-05,
+ "loss": 0.9083,
+ "step": 1956
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4335227456730902e-05,
+ "loss": 0.3333,
+ "step": 1957
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4329610620281253e-05,
+ "loss": 1.0002,
+ "step": 1958
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4323992102260733e-05,
+ "loss": 0.8595,
+ "step": 1959
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4318371904851502e-05,
+ "loss": 0.8919,
+ "step": 1960
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4312750030236382e-05,
+ "loss": 0.8652,
+ "step": 1961
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4307126480598852e-05,
+ "loss": 0.8431,
+ "step": 1962
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4301501258123024e-05,
+ "loss": 0.9213,
+ "step": 1963
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4295874364993672e-05,
+ "loss": 0.9258,
+ "step": 1964
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4290245803396221e-05,
+ "loss": 0.9051,
+ "step": 1965
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4284615575516737e-05,
+ "loss": 0.8925,
+ "step": 1966
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4278983683541934e-05,
+ "loss": 0.3409,
+ "step": 1967
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4273350129659173e-05,
+ "loss": 0.872,
+ "step": 1968
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4267714916056465e-05,
+ "loss": 0.8765,
+ "step": 1969
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.426207804492246e-05,
+ "loss": 0.8701,
+ "step": 1970
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4256439518446456e-05,
+ "loss": 0.3227,
+ "step": 1971
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4250799338818388e-05,
+ "loss": 0.8665,
+ "step": 1972
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.424515750822884e-05,
+ "loss": 0.8541,
+ "step": 1973
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4239514028869032e-05,
+ "loss": 0.9197,
+ "step": 1974
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4233868902930827e-05,
+ "loss": 0.8156,
+ "step": 1975
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4228222132606729e-05,
+ "loss": 0.8795,
+ "step": 1976
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4222573720089874e-05,
+ "loss": 0.9272,
+ "step": 1977
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4216923667574042e-05,
+ "loss": 0.8261,
+ "step": 1978
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4211271977253653e-05,
+ "loss": 0.9354,
+ "step": 1979
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4205618651323753e-05,
+ "loss": 0.9077,
+ "step": 1980
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4199963691980027e-05,
+ "loss": 0.8562,
+ "step": 1981
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4194307101418805e-05,
+ "loss": 0.917,
+ "step": 1982
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4188648881837033e-05,
+ "loss": 0.919,
+ "step": 1983
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4182989035432299e-05,
+ "loss": 0.8722,
+ "step": 1984
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4177327564402825e-05,
+ "loss": 0.8983,
+ "step": 1985
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4171664470947464e-05,
+ "loss": 0.9448,
+ "step": 1986
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.416599975726569e-05,
+ "loss": 0.9818,
+ "step": 1987
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4160333425557616e-05,
+ "loss": 0.8398,
+ "step": 1988
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4154665478023977e-05,
+ "loss": 0.8986,
+ "step": 1989
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4148995916866139e-05,
+ "loss": 0.8588,
+ "step": 1990
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.41433247442861e-05,
+ "loss": 0.81,
+ "step": 1991
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4137651962486472e-05,
+ "loss": 0.3643,
+ "step": 1992
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4131977573670499e-05,
+ "loss": 0.931,
+ "step": 1993
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.412630158004205e-05,
+ "loss": 0.9019,
+ "step": 1994
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4120623983805617e-05,
+ "loss": 0.9074,
+ "step": 1995
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4114944787166307e-05,
+ "loss": 0.9205,
+ "step": 1996
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4109263992329858e-05,
+ "loss": 0.9037,
+ "step": 1997
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4103581601502629e-05,
+ "loss": 0.3415,
+ "step": 1998
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.409789761689159e-05,
+ "loss": 0.8838,
+ "step": 1999
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4092212040704336e-05,
+ "loss": 0.8955,
+ "step": 2000
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.408652487514908e-05,
+ "loss": 0.9042,
+ "step": 2001
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.408083612243465e-05,
+ "loss": 0.3112,
+ "step": 2002
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4075145784770496e-05,
+ "loss": 0.9066,
+ "step": 2003
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4069453864366678e-05,
+ "loss": 0.9343,
+ "step": 2004
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4063760363433867e-05,
+ "loss": 0.3589,
+ "step": 2005
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.405806528418336e-05,
+ "loss": 0.8626,
+ "step": 2006
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4052368628827057e-05,
+ "loss": 0.9043,
+ "step": 2007
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4046670399577478e-05,
+ "loss": 0.9,
+ "step": 2008
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4040970598647742e-05,
+ "loss": 0.344,
+ "step": 2009
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4035269228251589e-05,
+ "loss": 0.8871,
+ "step": 2010
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4029566290603368e-05,
+ "loss": 0.8177,
+ "step": 2011
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4023861787918031e-05,
+ "loss": 0.9115,
+ "step": 2012
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4018155722411144e-05,
+ "loss": 0.8899,
+ "step": 2013
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4012448096298874e-05,
+ "loss": 0.9085,
+ "step": 2014
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4006738911798001e-05,
+ "loss": 0.8505,
+ "step": 2015
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.40010281711259e-05,
+ "loss": 0.8961,
+ "step": 2016
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3995315876500565e-05,
+ "loss": 0.9091,
+ "step": 2017
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3989602030140581e-05,
+ "loss": 0.9234,
+ "step": 2018
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.398388663426514e-05,
+ "loss": 0.8828,
+ "step": 2019
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3978169691094037e-05,
+ "loss": 0.8319,
+ "step": 2020
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3972451202847665e-05,
+ "loss": 0.9261,
+ "step": 2021
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3966731171747024e-05,
+ "loss": 0.8725,
+ "step": 2022
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3961009600013702e-05,
+ "loss": 0.8844,
+ "step": 2023
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3955286489869894e-05,
+ "loss": 0.8844,
+ "step": 2024
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.394956184353839e-05,
+ "loss": 0.8217,
+ "step": 2025
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3943835663242577e-05,
+ "loss": 0.9053,
+ "step": 2026
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3938107951206438e-05,
+ "loss": 0.8662,
+ "step": 2027
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3932378709654548e-05,
+ "loss": 0.8742,
+ "step": 2028
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3926647940812081e-05,
+ "loss": 0.889,
+ "step": 2029
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.39209156469048e-05,
+ "loss": 0.8695,
+ "step": 2030
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3915181830159061e-05,
+ "loss": 0.9434,
+ "step": 2031
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3909446492801819e-05,
+ "loss": 0.8806,
+ "step": 2032
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3903709637060605e-05,
+ "loss": 0.9066,
+ "step": 2033
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3897971265163546e-05,
+ "loss": 0.8069,
+ "step": 2034
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3892231379339369e-05,
+ "loss": 0.8629,
+ "step": 2035
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3886489981817375e-05,
+ "loss": 0.9743,
+ "step": 2036
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3880747074827454e-05,
+ "loss": 0.8458,
+ "step": 2037
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3875002660600085e-05,
+ "loss": 0.8798,
+ "step": 2038
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.386925674136634e-05,
+ "loss": 0.3686,
+ "step": 2039
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3863509319357857e-05,
+ "loss": 0.9186,
+ "step": 2040
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3857760396806876e-05,
+ "loss": 0.2869,
+ "step": 2041
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3852009975946209e-05,
+ "loss": 0.9297,
+ "step": 2042
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3846258059009252e-05,
+ "loss": 0.8997,
+ "step": 2043
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.384050464822999e-05,
+ "loss": 0.9275,
+ "step": 2044
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.383474974584297e-05,
+ "loss": 0.8687,
+ "step": 2045
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3828993354083342e-05,
+ "loss": 0.8515,
+ "step": 2046
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3823235475186816e-05,
+ "loss": 0.9319,
+ "step": 2047
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3817476111389685e-05,
+ "loss": 0.8814,
+ "step": 2048
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3811715264928824e-05,
+ "loss": 0.9526,
+ "step": 2049
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3805952938041674e-05,
+ "loss": 0.9228,
+ "step": 2050
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3800189132966257e-05,
+ "loss": 0.921,
+ "step": 2051
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3794423851941174e-05,
+ "loss": 0.8548,
+ "step": 2052
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.378865709720559e-05,
+ "loss": 0.359,
+ "step": 2053
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3782888870999245e-05,
+ "loss": 0.879,
+ "step": 2054
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.377711917556245e-05,
+ "loss": 0.8598,
+ "step": 2055
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3771348013136096e-05,
+ "loss": 0.9131,
+ "step": 2056
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3765575385961627e-05,
+ "loss": 0.8867,
+ "step": 2057
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3759801296281072e-05,
+ "loss": 0.3295,
+ "step": 2058
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3754025746337014e-05,
+ "loss": 0.8873,
+ "step": 2059
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3748248738372616e-05,
+ "loss": 0.893,
+ "step": 2060
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3742470274631599e-05,
+ "loss": 0.9075,
+ "step": 2061
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3736690357358253e-05,
+ "loss": 0.9009,
+ "step": 2062
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3730908988797427e-05,
+ "loss": 0.8598,
+ "step": 2063
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3725126171194543e-05,
+ "loss": 0.8945,
+ "step": 2064
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.371934190679558e-05,
+ "loss": 0.9255,
+ "step": 2065
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3713556197847076e-05,
+ "loss": 0.8482,
+ "step": 2066
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3707769046596136e-05,
+ "loss": 0.8548,
+ "step": 2067
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3701980455290425e-05,
+ "loss": 0.8895,
+ "step": 2068
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3696190426178162e-05,
+ "loss": 0.8559,
+ "step": 2069
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3690398961508128e-05,
+ "loss": 0.3352,
+ "step": 2070
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3684606063529662e-05,
+ "loss": 0.3463,
+ "step": 2071
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3678811734492659e-05,
+ "loss": 0.9215,
+ "step": 2072
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.367301597664757e-05,
+ "loss": 0.8368,
+ "step": 2073
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.36672187922454e-05,
+ "loss": 0.9162,
+ "step": 2074
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3661420183537705e-05,
+ "loss": 0.9218,
+ "step": 2075
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3655620152776605e-05,
+ "loss": 0.8654,
+ "step": 2076
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.364981870221476e-05,
+ "loss": 0.8559,
+ "step": 2077
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.364401583410539e-05,
+ "loss": 0.8737,
+ "step": 2078
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3638211550702256e-05,
+ "loss": 0.9215,
+ "step": 2079
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.363240585425968e-05,
+ "loss": 0.875,
+ "step": 2080
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.362659874703253e-05,
+ "loss": 0.8382,
+ "step": 2081
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3620790231276213e-05,
+ "loss": 0.9178,
+ "step": 2082
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3614980309246692e-05,
+ "loss": 0.841,
+ "step": 2083
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3609168983200474e-05,
+ "loss": 0.9028,
+ "step": 2084
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3603356255394613e-05,
+ "loss": 0.8932,
+ "step": 2085
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3597542128086702e-05,
+ "loss": 0.9153,
+ "step": 2086
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3591726603534885e-05,
+ "loss": 0.919,
+ "step": 2087
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3585909683997842e-05,
+ "loss": 0.9014,
+ "step": 2088
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3580091371734798e-05,
+ "loss": 0.8677,
+ "step": 2089
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.357427166900552e-05,
+ "loss": 0.8769,
+ "step": 2090
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3568450578070309e-05,
+ "loss": 0.9164,
+ "step": 2091
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3562628101190015e-05,
+ "loss": 0.9132,
+ "step": 2092
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3556804240626019e-05,
+ "loss": 0.9407,
+ "step": 2093
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3550978998640241e-05,
+ "loss": 0.8397,
+ "step": 2094
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3545152377495136e-05,
+ "loss": 0.9516,
+ "step": 2095
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3539324379453698e-05,
+ "loss": 0.889,
+ "step": 2096
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3533495006779455e-05,
+ "loss": 0.8179,
+ "step": 2097
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3527664261736471e-05,
+ "loss": 0.9019,
+ "step": 2098
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3521832146589335e-05,
+ "loss": 0.9463,
+ "step": 2099
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3515998663603174e-05,
+ "loss": 0.8962,
+ "step": 2100
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3510163815043647e-05,
+ "loss": 0.8493,
+ "step": 2101
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3504327603176943e-05,
+ "loss": 0.7998,
+ "step": 2102
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3498490030269782e-05,
+ "loss": 0.8454,
+ "step": 2103
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3492651098589398e-05,
+ "loss": 0.9641,
+ "step": 2104
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3486810810403578e-05,
+ "loss": 0.9498,
+ "step": 2105
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.348096916798062e-05,
+ "loss": 0.874,
+ "step": 2106
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3475126173589343e-05,
+ "loss": 0.8762,
+ "step": 2107
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3469281829499107e-05,
+ "loss": 0.8726,
+ "step": 2108
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3463436137979786e-05,
+ "loss": 0.9508,
+ "step": 2109
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3457589101301776e-05,
+ "loss": 0.8612,
+ "step": 2110
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3451740721736005e-05,
+ "loss": 0.8746,
+ "step": 2111
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3445891001553905e-05,
+ "loss": 0.8859,
+ "step": 2112
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3440039943027452e-05,
+ "loss": 0.3673,
+ "step": 2113
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3434187548429126e-05,
+ "loss": 0.9588,
+ "step": 2114
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3428333820031922e-05,
+ "loss": 0.8508,
+ "step": 2115
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3422478760109371e-05,
+ "loss": 0.3524,
+ "step": 2116
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3416622370935507e-05,
+ "loss": 0.9202,
+ "step": 2117
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3410764654784885e-05,
+ "loss": 0.8818,
+ "step": 2118
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3404905613932573e-05,
+ "loss": 0.8155,
+ "step": 2119
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3399045250654152e-05,
+ "loss": 0.8723,
+ "step": 2120
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3393183567225724e-05,
+ "loss": 0.9067,
+ "step": 2121
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3387320565923901e-05,
+ "loss": 0.8955,
+ "step": 2122
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.33814562490258e-05,
+ "loss": 0.8651,
+ "step": 2123
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3375590618809056e-05,
+ "loss": 0.8972,
+ "step": 2124
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3369723677551813e-05,
+ "loss": 0.8249,
+ "step": 2125
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3363855427532724e-05,
+ "loss": 0.3413,
+ "step": 2126
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3357985871030948e-05,
+ "loss": 0.9409,
+ "step": 2127
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3352115010326155e-05,
+ "loss": 0.8396,
+ "step": 2128
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3346242847698516e-05,
+ "loss": 0.9014,
+ "step": 2129
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3340369385428713e-05,
+ "loss": 0.9671,
+ "step": 2130
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3334494625797936e-05,
+ "loss": 0.9021,
+ "step": 2131
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3328618571087867e-05,
+ "loss": 0.8415,
+ "step": 2132
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.33227412235807e-05,
+ "loss": 0.9408,
+ "step": 2133
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3316862585559132e-05,
+ "loss": 0.8912,
+ "step": 2134
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3310982659306352e-05,
+ "loss": 0.9067,
+ "step": 2135
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3305101447106064e-05,
+ "loss": 0.878,
+ "step": 2136
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3299218951242456e-05,
+ "loss": 0.8873,
+ "step": 2137
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3293335174000226e-05,
+ "loss": 0.9478,
+ "step": 2138
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.328745011766456e-05,
+ "loss": 0.9111,
+ "step": 2139
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3281563784521154e-05,
+ "loss": 0.8652,
+ "step": 2140
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3275676176856185e-05,
+ "loss": 0.9174,
+ "step": 2141
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3269787296956333e-05,
+ "loss": 0.8871,
+ "step": 2142
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3263897147108778e-05,
+ "loss": 0.8863,
+ "step": 2143
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3258005729601178e-05,
+ "loss": 0.8067,
+ "step": 2144
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3252113046721692e-05,
+ "loss": 0.8547,
+ "step": 2145
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3246219100758974e-05,
+ "loss": 0.8756,
+ "step": 2146
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3240323894002166e-05,
+ "loss": 0.9105,
+ "step": 2147
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3234427428740895e-05,
+ "loss": 0.9473,
+ "step": 2148
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3228529707265279e-05,
+ "loss": 0.8832,
+ "step": 2149
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.322263073186593e-05,
+ "loss": 0.8785,
+ "step": 2150
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3216730504833938e-05,
+ "loss": 0.8199,
+ "step": 2151
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3210829028460883e-05,
+ "loss": 0.901,
+ "step": 2152
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3204926305038832e-05,
+ "loss": 0.869,
+ "step": 2153
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3199022336860335e-05,
+ "loss": 0.9062,
+ "step": 2154
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3193117126218425e-05,
+ "loss": 0.8614,
+ "step": 2155
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3187210675406617e-05,
+ "loss": 0.9126,
+ "step": 2156
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.318130298671891e-05,
+ "loss": 0.9333,
+ "step": 2157
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3175394062449777e-05,
+ "loss": 0.8785,
+ "step": 2158
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3169483904894185e-05,
+ "loss": 0.8519,
+ "step": 2159
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3163572516347565e-05,
+ "loss": 0.903,
+ "step": 2160
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3157659899105835e-05,
+ "loss": 0.9312,
+ "step": 2161
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.315174605546538e-05,
+ "loss": 0.912,
+ "step": 2162
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3145830987723081e-05,
+ "loss": 0.8795,
+ "step": 2163
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3139914698176273e-05,
+ "loss": 0.8608,
+ "step": 2164
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3133997189122777e-05,
+ "loss": 0.8786,
+ "step": 2165
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3128078462860887e-05,
+ "loss": 0.8291,
+ "step": 2166
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3122158521689367e-05,
+ "loss": 0.8889,
+ "step": 2167
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3116237367907454e-05,
+ "loss": 0.8664,
+ "step": 2168
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3110315003814855e-05,
+ "loss": 0.8604,
+ "step": 2169
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3104391431711748e-05,
+ "loss": 0.8725,
+ "step": 2170
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.309846665389878e-05,
+ "loss": 0.8544,
+ "step": 2171
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.309254067267707e-05,
+ "loss": 0.8509,
+ "step": 2172
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3086613490348198e-05,
+ "loss": 0.898,
+ "step": 2173
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3080685109214208e-05,
+ "loss": 0.8706,
+ "step": 2174
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3074755531577628e-05,
+ "loss": 0.8691,
+ "step": 2175
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3068824759741428e-05,
+ "loss": 0.8378,
+ "step": 2176
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.306289279600905e-05,
+ "loss": 0.8933,
+ "step": 2177
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3056959642684404e-05,
+ "loss": 0.9406,
+ "step": 2178
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.305102530207186e-05,
+ "loss": 0.8409,
+ "step": 2179
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3045089776476246e-05,
+ "loss": 0.9124,
+ "step": 2180
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3039153068202853e-05,
+ "loss": 0.8653,
+ "step": 2181
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3033215179557424e-05,
+ "loss": 0.8899,
+ "step": 2182
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3027276112846172e-05,
+ "loss": 0.9423,
+ "step": 2183
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3021335870375763e-05,
+ "loss": 0.9004,
+ "step": 2184
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3015394454453316e-05,
+ "loss": 0.9119,
+ "step": 2185
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3009451867386411e-05,
+ "loss": 0.9051,
+ "step": 2186
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3003508111483077e-05,
+ "loss": 0.9389,
+ "step": 2187
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.29975631890518e-05,
+ "loss": 0.927,
+ "step": 2188
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2991617102401524e-05,
+ "loss": 0.8467,
+ "step": 2189
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2985669853841635e-05,
+ "loss": 0.8822,
+ "step": 2190
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.297972144568198e-05,
+ "loss": 0.8567,
+ "step": 2191
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2973771880232853e-05,
+ "loss": 0.869,
+ "step": 2192
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2967821159804994e-05,
+ "loss": 0.8298,
+ "step": 2193
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2961869286709594e-05,
+ "loss": 0.8769,
+ "step": 2194
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.295591626325829e-05,
+ "loss": 0.9496,
+ "step": 2195
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2949962091763174e-05,
+ "loss": 0.8732,
+ "step": 2196
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2944006774536773e-05,
+ "loss": 0.9003,
+ "step": 2197
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2938050313892062e-05,
+ "loss": 0.3477,
+ "step": 2198
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2932092712142468e-05,
+ "loss": 0.8843,
+ "step": 2199
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.292613397160185e-05,
+ "loss": 0.3639,
+ "step": 2200
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2920174094584514e-05,
+ "loss": 0.8033,
+ "step": 2201
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2914213083405211e-05,
+ "loss": 0.8932,
+ "step": 2202
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2908250940379124e-05,
+ "loss": 0.8685,
+ "step": 2203
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2902287667821885e-05,
+ "loss": 0.8218,
+ "step": 2204
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.289632326804956e-05,
+ "loss": 0.9003,
+ "step": 2205
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2890357743378649e-05,
+ "loss": 0.8658,
+ "step": 2206
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2884391096126098e-05,
+ "loss": 0.3369,
+ "step": 2207
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2878423328609281e-05,
+ "loss": 0.8542,
+ "step": 2208
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2872454443146015e-05,
+ "loss": 0.8544,
+ "step": 2209
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.286648444205454e-05,
+ "loss": 0.9076,
+ "step": 2210
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2860513327653537e-05,
+ "loss": 0.3334,
+ "step": 2211
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2854541102262119e-05,
+ "loss": 0.9436,
+ "step": 2212
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.284856776819983e-05,
+ "loss": 0.8395,
+ "step": 2213
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2842593327786649e-05,
+ "loss": 0.9054,
+ "step": 2214
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2836617783342968e-05,
+ "loss": 0.8798,
+ "step": 2215
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2830641137189628e-05,
+ "loss": 0.8432,
+ "step": 2216
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.282466339164789e-05,
+ "loss": 0.876,
+ "step": 2217
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2818684549039437e-05,
+ "loss": 0.9121,
+ "step": 2218
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2812704611686386e-05,
+ "loss": 0.9487,
+ "step": 2219
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2806723581911274e-05,
+ "loss": 0.889,
+ "step": 2220
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2800741462037065e-05,
+ "loss": 0.8988,
+ "step": 2221
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2794758254387147e-05,
+ "loss": 0.3435,
+ "step": 2222
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2788773961285323e-05,
+ "loss": 0.8443,
+ "step": 2223
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2782788585055829e-05,
+ "loss": 0.9681,
+ "step": 2224
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2776802128023317e-05,
+ "loss": 0.8569,
+ "step": 2225
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2770814592512853e-05,
+ "loss": 0.8902,
+ "step": 2226
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2764825980849931e-05,
+ "loss": 0.8468,
+ "step": 2227
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2758836295360455e-05,
+ "loss": 0.8717,
+ "step": 2228
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2752845538370752e-05,
+ "loss": 0.7939,
+ "step": 2229
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2746853712207567e-05,
+ "loss": 0.8697,
+ "step": 2230
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.274086081919805e-05,
+ "loss": 0.8292,
+ "step": 2231
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.273486686166977e-05,
+ "loss": 0.9003,
+ "step": 2232
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2728871841950719e-05,
+ "loss": 0.8565,
+ "step": 2233
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2722875762369288e-05,
+ "loss": 0.9289,
+ "step": 2234
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2716878625254287e-05,
+ "loss": 0.9051,
+ "step": 2235
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2710880432934934e-05,
+ "loss": 0.9049,
+ "step": 2236
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.270488118774086e-05,
+ "loss": 0.8711,
+ "step": 2237
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.26988808920021e-05,
+ "loss": 0.9489,
+ "step": 2238
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.26928795480491e-05,
+ "loss": 0.8633,
+ "step": 2239
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2686877158212715e-05,
+ "loss": 0.8904,
+ "step": 2240
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.26808737248242e-05,
+ "loss": 0.8486,
+ "step": 2241
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2674869250215225e-05,
+ "loss": 0.8207,
+ "step": 2242
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2668863736717855e-05,
+ "loss": 0.8879,
+ "step": 2243
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2662857186664558e-05,
+ "loss": 0.8753,
+ "step": 2244
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2656849602388222e-05,
+ "loss": 0.8731,
+ "step": 2245
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2650840986222111e-05,
+ "loss": 0.8443,
+ "step": 2246
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2644831340499906e-05,
+ "loss": 0.839,
+ "step": 2247
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2638820667555685e-05,
+ "loss": 0.9184,
+ "step": 2248
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2632808969723927e-05,
+ "loss": 0.8163,
+ "step": 2249
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.26267962493395e-05,
+ "loss": 0.9418,
+ "step": 2250
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2620782508737678e-05,
+ "loss": 0.8629,
+ "step": 2251
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2614767750254129e-05,
+ "loss": 0.3414,
+ "step": 2252
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2608751976224916e-05,
+ "loss": 0.8288,
+ "step": 2253
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2602735188986498e-05,
+ "loss": 0.9099,
+ "step": 2254
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2596717390875721e-05,
+ "loss": 0.8752,
+ "step": 2255
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2590698584229834e-05,
+ "loss": 0.9098,
+ "step": 2256
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2584678771386467e-05,
+ "loss": 0.9381,
+ "step": 2257
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2578657954683651e-05,
+ "loss": 0.88,
+ "step": 2258
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2572636136459799e-05,
+ "loss": 0.9191,
+ "step": 2259
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2566613319053713e-05,
+ "loss": 0.851,
+ "step": 2260
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2560589504804592e-05,
+ "loss": 0.9334,
+ "step": 2261
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2554564696052011e-05,
+ "loss": 0.9118,
+ "step": 2262
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2548538895135942e-05,
+ "loss": 0.33,
+ "step": 2263
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.254251210439673e-05,
+ "loss": 0.8842,
+ "step": 2264
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2536484326175114e-05,
+ "loss": 0.893,
+ "step": 2265
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2530455562812214e-05,
+ "loss": 0.8388,
+ "step": 2266
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.252442581664953e-05,
+ "loss": 0.8895,
+ "step": 2267
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2518395090028952e-05,
+ "loss": 0.946,
+ "step": 2268
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2512363385292739e-05,
+ "loss": 0.8648,
+ "step": 2269
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2506330704783533e-05,
+ "loss": 0.9077,
+ "step": 2270
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2500297050844367e-05,
+ "loss": 0.908,
+ "step": 2271
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2494262425818637e-05,
+ "loss": 0.9121,
+ "step": 2272
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2488226832050116e-05,
+ "loss": 0.9672,
+ "step": 2273
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2482190271882973e-05,
+ "loss": 0.8562,
+ "step": 2274
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2476152747661727e-05,
+ "loss": 0.8644,
+ "step": 2275
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2470114261731288e-05,
+ "loss": 0.8771,
+ "step": 2276
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.246407481643693e-05,
+ "loss": 0.8566,
+ "step": 2277
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.245803441412431e-05,
+ "loss": 0.8637,
+ "step": 2278
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2451993057139445e-05,
+ "loss": 0.882,
+ "step": 2279
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2445950747828732e-05,
+ "loss": 0.8815,
+ "step": 2280
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2439907488538934e-05,
+ "loss": 0.869,
+ "step": 2281
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.243386328161718e-05,
+ "loss": 0.9532,
+ "step": 2282
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2427818129410975e-05,
+ "loss": 0.8998,
+ "step": 2283
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2421772034268187e-05,
+ "loss": 0.8572,
+ "step": 2284
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2415724998537042e-05,
+ "loss": 0.8814,
+ "step": 2285
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2409677024566145e-05,
+ "loss": 0.9488,
+ "step": 2286
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.240362811470446e-05,
+ "loss": 0.8974,
+ "step": 2287
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2397578271301312e-05,
+ "loss": 0.9158,
+ "step": 2288
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2391527496706389e-05,
+ "loss": 0.8891,
+ "step": 2289
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2385475793269744e-05,
+ "loss": 0.8739,
+ "step": 2290
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2379423163341791e-05,
+ "loss": 0.8984,
+ "step": 2291
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2373369609273299e-05,
+ "loss": 0.8428,
+ "step": 2292
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2367315133415396e-05,
+ "loss": 0.8553,
+ "step": 2293
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2361259738119575e-05,
+ "loss": 0.8725,
+ "step": 2294
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2355203425737683e-05,
+ "loss": 0.8022,
+ "step": 2295
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2349146198621917e-05,
+ "loss": 0.8851,
+ "step": 2296
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2343088059124839e-05,
+ "loss": 0.8787,
+ "step": 2297
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2337029009599357e-05,
+ "loss": 0.3428,
+ "step": 2298
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2330969052398735e-05,
+ "loss": 0.9392,
+ "step": 2299
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2324908189876597e-05,
+ "loss": 0.8819,
+ "step": 2300
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2318846424386907e-05,
+ "loss": 0.9341,
+ "step": 2301
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2312783758283981e-05,
+ "loss": 0.9437,
+ "step": 2302
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.23067201939225e-05,
+ "loss": 0.972,
+ "step": 2303
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2300655733657475e-05,
+ "loss": 0.844,
+ "step": 2304
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2294590379844268e-05,
+ "loss": 0.8892,
+ "step": 2305
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2288524134838602e-05,
+ "loss": 0.9031,
+ "step": 2306
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2282457000996533e-05,
+ "loss": 0.9197,
+ "step": 2307
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2276388980674465e-05,
+ "loss": 0.8836,
+ "step": 2308
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.227032007622915e-05,
+ "loss": 0.8728,
+ "step": 2309
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2264250290017675e-05,
+ "loss": 0.8441,
+ "step": 2310
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2258179624397477e-05,
+ "loss": 0.7777,
+ "step": 2311
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2252108081726337e-05,
+ "loss": 0.8415,
+ "step": 2312
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.224603566436237e-05,
+ "loss": 0.9049,
+ "step": 2313
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2239962374664029e-05,
+ "loss": 0.8827,
+ "step": 2314
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2233888214990113e-05,
+ "loss": 0.3561,
+ "step": 2315
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2227813187699757e-05,
+ "loss": 0.8743,
+ "step": 2316
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.222173729515243e-05,
+ "loss": 0.8895,
+ "step": 2317
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2215660539707936e-05,
+ "loss": 0.9133,
+ "step": 2318
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2209582923726424e-05,
+ "loss": 0.8837,
+ "step": 2319
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2203504449568361e-05,
+ "loss": 0.9187,
+ "step": 2320
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2197425119594563e-05,
+ "loss": 0.9186,
+ "step": 2321
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.219134493616617e-05,
+ "loss": 0.8447,
+ "step": 2322
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2185263901644653e-05,
+ "loss": 0.8874,
+ "step": 2323
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.217918201839182e-05,
+ "loss": 0.9798,
+ "step": 2324
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2173099288769799e-05,
+ "loss": 0.913,
+ "step": 2325
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2167015715141057e-05,
+ "loss": 0.9146,
+ "step": 2326
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.216093129986838e-05,
+ "loss": 0.8392,
+ "step": 2327
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2154846045314885e-05,
+ "loss": 0.8755,
+ "step": 2328
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.214875995384402e-05,
+ "loss": 0.8648,
+ "step": 2329
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.214267302781955e-05,
+ "loss": 0.8526,
+ "step": 2330
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2136585269605558e-05,
+ "loss": 0.9036,
+ "step": 2331
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2130496681566475e-05,
+ "loss": 0.8257,
+ "step": 2332
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.212440726606703e-05,
+ "loss": 0.8487,
+ "step": 2333
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.211831702547228e-05,
+ "loss": 0.8688,
+ "step": 2334
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2112225962147605e-05,
+ "loss": 0.8857,
+ "step": 2335
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.210613407845871e-05,
+ "loss": 0.3028,
+ "step": 2336
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2100041376771605e-05,
+ "loss": 0.849,
+ "step": 2337
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.209394785945263e-05,
+ "loss": 0.8291,
+ "step": 2338
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2087853528868432e-05,
+ "loss": 0.8964,
+ "step": 2339
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2081758387385982e-05,
+ "loss": 0.8533,
+ "step": 2340
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2075662437372567e-05,
+ "loss": 0.9254,
+ "step": 2341
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2069565681195776e-05,
+ "loss": 0.34,
+ "step": 2342
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.206346812122352e-05,
+ "loss": 0.8912,
+ "step": 2343
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2057369759824025e-05,
+ "loss": 0.9265,
+ "step": 2344
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2051270599365825e-05,
+ "loss": 0.9092,
+ "step": 2345
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2045170642217756e-05,
+ "loss": 0.8999,
+ "step": 2346
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2039069890748978e-05,
+ "loss": 0.9081,
+ "step": 2347
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2032968347328952e-05,
+ "loss": 0.8721,
+ "step": 2348
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2026866014327446e-05,
+ "loss": 0.92,
+ "step": 2349
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2020762894114535e-05,
+ "loss": 0.8671,
+ "step": 2350
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.20146589890606e-05,
+ "loss": 0.902,
+ "step": 2351
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2008554301536328e-05,
+ "loss": 0.8555,
+ "step": 2352
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2002448833912712e-05,
+ "loss": 0.8908,
+ "step": 2353
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1996342588561042e-05,
+ "loss": 0.862,
+ "step": 2354
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1990235567852917e-05,
+ "loss": 0.8971,
+ "step": 2355
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1984127774160226e-05,
+ "loss": 0.8591,
+ "step": 2356
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1978019209855174e-05,
+ "loss": 0.8669,
+ "step": 2357
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1971909877310253e-05,
+ "loss": 0.807,
+ "step": 2358
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1965799778898258e-05,
+ "loss": 0.9165,
+ "step": 2359
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1959688916992279e-05,
+ "loss": 0.8866,
+ "step": 2360
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1953577293965707e-05,
+ "loss": 0.9545,
+ "step": 2361
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1947464912192228e-05,
+ "loss": 0.953,
+ "step": 2362
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1941351774045815e-05,
+ "loss": 0.3617,
+ "step": 2363
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1935237881900743e-05,
+ "loss": 0.8426,
+ "step": 2364
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1929123238131579e-05,
+ "loss": 0.8996,
+ "step": 2365
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1923007845113178e-05,
+ "loss": 0.9363,
+ "step": 2366
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1916891705220689e-05,
+ "loss": 0.792,
+ "step": 2367
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.191077482082955e-05,
+ "loss": 0.8611,
+ "step": 2368
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1904657194315486e-05,
+ "loss": 0.8716,
+ "step": 2369
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1898538828054517e-05,
+ "loss": 0.8647,
+ "step": 2370
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1892419724422946e-05,
+ "loss": 0.8368,
+ "step": 2371
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1886299885797357e-05,
+ "loss": 0.8444,
+ "step": 2372
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1880179314554629e-05,
+ "loss": 0.8683,
+ "step": 2373
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1874058013071923e-05,
+ "loss": 0.8961,
+ "step": 2374
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1867935983726676e-05,
+ "loss": 0.8572,
+ "step": 2375
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.186181322889662e-05,
+ "loss": 0.8506,
+ "step": 2376
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1855689750959759e-05,
+ "loss": 0.8712,
+ "step": 2377
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1849565552294379e-05,
+ "loss": 0.3329,
+ "step": 2378
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1843440635279056e-05,
+ "loss": 0.893,
+ "step": 2379
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1837315002292629e-05,
+ "loss": 0.8584,
+ "step": 2380
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1831188655714225e-05,
+ "loss": 0.876,
+ "step": 2381
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.182506159792325e-05,
+ "loss": 0.9164,
+ "step": 2382
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1818933831299381e-05,
+ "loss": 0.3203,
+ "step": 2383
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1812805358222571e-05,
+ "loss": 0.8582,
+ "step": 2384
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.180667618107305e-05,
+ "loss": 0.8843,
+ "step": 2385
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1800546302231317e-05,
+ "loss": 0.8675,
+ "step": 2386
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1794415724078147e-05,
+ "loss": 0.834,
+ "step": 2387
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1788284448994588e-05,
+ "loss": 0.8438,
+ "step": 2388
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1782152479361956e-05,
+ "loss": 0.8646,
+ "step": 2389
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1776019817561834e-05,
+ "loss": 0.9151,
+ "step": 2390
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1769886465976086e-05,
+ "loss": 0.8176,
+ "step": 2391
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1763752426986823e-05,
+ "loss": 0.9262,
+ "step": 2392
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1757617702976443e-05,
+ "loss": 0.832,
+ "step": 2393
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.17514822963276e-05,
+ "loss": 0.8366,
+ "step": 2394
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1745346209423216e-05,
+ "loss": 0.858,
+ "step": 2395
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1739209444646479e-05,
+ "loss": 0.3131,
+ "step": 2396
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1733072004380827e-05,
+ "loss": 0.9138,
+ "step": 2397
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1726933891009985e-05,
+ "loss": 0.847,
+ "step": 2398
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1720795106917917e-05,
+ "loss": 0.927,
+ "step": 2399
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.171465565448886e-05,
+ "loss": 0.8945,
+ "step": 2400
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1708515536107299e-05,
+ "loss": 0.8702,
+ "step": 2401
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1702374754157998e-05,
+ "loss": 0.9371,
+ "step": 2402
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1696233311025957e-05,
+ "loss": 0.8477,
+ "step": 2403
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1690091209096441e-05,
+ "loss": 0.8746,
+ "step": 2404
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1683948450754976e-05,
+ "loss": 0.8714,
+ "step": 2405
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1677805038387337e-05,
+ "loss": 0.8483,
+ "step": 2406
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1671660974379554e-05,
+ "loss": 0.8766,
+ "step": 2407
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1665516261117914e-05,
+ "loss": 0.8786,
+ "step": 2408
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1659370900988946e-05,
+ "loss": 0.9222,
+ "step": 2409
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.165322489637944e-05,
+ "loss": 0.8537,
+ "step": 2410
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.164707824967644e-05,
+ "loss": 0.3372,
+ "step": 2411
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1640930963267226e-05,
+ "loss": 0.8816,
+ "step": 2412
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1634783039539328e-05,
+ "loss": 0.91,
+ "step": 2413
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.162863448088054e-05,
+ "loss": 0.8229,
+ "step": 2414
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1622485289678886e-05,
+ "loss": 0.8628,
+ "step": 2415
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1616335468322641e-05,
+ "loss": 0.929,
+ "step": 2416
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1610185019200324e-05,
+ "loss": 0.8675,
+ "step": 2417
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1604033944700701e-05,
+ "loss": 0.8585,
+ "step": 2418
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1597882247212776e-05,
+ "loss": 0.8846,
+ "step": 2419
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.15917299291258e-05,
+ "loss": 0.9459,
+ "step": 2420
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1585576992829261e-05,
+ "loss": 0.8492,
+ "step": 2421
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1579423440712887e-05,
+ "loss": 0.8401,
+ "step": 2422
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1573269275166652e-05,
+ "loss": 0.839,
+ "step": 2423
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1567114498580758e-05,
+ "loss": 0.876,
+ "step": 2424
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1560959113345649e-05,
+ "loss": 0.8728,
+ "step": 2425
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1554803121852005e-05,
+ "loss": 0.8891,
+ "step": 2426
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1548646526490749e-05,
+ "loss": 0.8827,
+ "step": 2427
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1542489329653024e-05,
+ "loss": 0.8866,
+ "step": 2428
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.153633153373022e-05,
+ "loss": 0.3399,
+ "step": 2429
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1530173141113947e-05,
+ "loss": 0.9129,
+ "step": 2430
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1524014154196063e-05,
+ "loss": 0.8873,
+ "step": 2431
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1517854575368644e-05,
+ "loss": 0.9095,
+ "step": 2432
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1511694407023994e-05,
+ "loss": 0.845,
+ "step": 2433
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1505533651554654e-05,
+ "loss": 0.9395,
+ "step": 2434
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1499372311353398e-05,
+ "loss": 0.8332,
+ "step": 2435
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.149321038881321e-05,
+ "loss": 0.8609,
+ "step": 2436
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1487047886327314e-05,
+ "loss": 0.8932,
+ "step": 2437
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1480884806289151e-05,
+ "loss": 0.8232,
+ "step": 2438
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1474721151092397e-05,
+ "loss": 0.8671,
+ "step": 2439
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1468556923130943e-05,
+ "loss": 0.3353,
+ "step": 2440
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.14623921247989e-05,
+ "loss": 0.8704,
+ "step": 2441
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1456226758490603e-05,
+ "loss": 0.8445,
+ "step": 2442
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1450060826600618e-05,
+ "loss": 0.814,
+ "step": 2443
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1443894331523718e-05,
+ "loss": 0.8857,
+ "step": 2444
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1437727275654893e-05,
+ "loss": 0.9106,
+ "step": 2445
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1431559661389362e-05,
+ "loss": 0.8441,
+ "step": 2446
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1425391491122557e-05,
+ "loss": 0.3749,
+ "step": 2447
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.141922276725012e-05,
+ "loss": 0.8948,
+ "step": 2448
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1413053492167915e-05,
+ "loss": 0.9103,
+ "step": 2449
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1406883668272015e-05,
+ "loss": 0.8903,
+ "step": 2450
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.140071329795871e-05,
+ "loss": 0.8281,
+ "step": 2451
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.13945423836245e-05,
+ "loss": 0.9291,
+ "step": 2452
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1388370927666102e-05,
+ "loss": 0.8681,
+ "step": 2453
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1382198932480429e-05,
+ "loss": 0.84,
+ "step": 2454
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1376026400464616e-05,
+ "loss": 0.884,
+ "step": 2455
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.136985333401601e-05,
+ "loss": 0.9238,
+ "step": 2456
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1363679735532151e-05,
+ "loss": 0.8118,
+ "step": 2457
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1357505607410797e-05,
+ "loss": 0.8997,
+ "step": 2458
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1351330952049908e-05,
+ "loss": 0.3291,
+ "step": 2459
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1345155771847646e-05,
+ "loss": 0.8934,
+ "step": 2460
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1338980069202388e-05,
+ "loss": 0.839,
+ "step": 2461
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1332803846512697e-05,
+ "loss": 0.8599,
+ "step": 2462
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1326627106177348e-05,
+ "loss": 0.8432,
+ "step": 2463
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.132044985059532e-05,
+ "loss": 0.8947,
+ "step": 2464
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1314272082165785e-05,
+ "loss": 0.8495,
+ "step": 2465
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1308093803288119e-05,
+ "loss": 0.9166,
+ "step": 2466
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.130191501636189e-05,
+ "loss": 0.8832,
+ "step": 2467
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1295735723786872e-05,
+ "loss": 0.8477,
+ "step": 2468
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1289555927963032e-05,
+ "loss": 0.8675,
+ "step": 2469
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1283375631290528e-05,
+ "loss": 0.8931,
+ "step": 2470
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1277194836169714e-05,
+ "loss": 0.8718,
+ "step": 2471
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1271013545001144e-05,
+ "loss": 0.9066,
+ "step": 2472
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1264831760185562e-05,
+ "loss": 0.9353,
+ "step": 2473
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1258649484123895e-05,
+ "loss": 0.83,
+ "step": 2474
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1252466719217274e-05,
+ "loss": 0.8601,
+ "step": 2475
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1246283467867012e-05,
+ "loss": 0.856,
+ "step": 2476
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1240099732474613e-05,
+ "loss": 0.8202,
+ "step": 2477
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1233915515441765e-05,
+ "loss": 0.8848,
+ "step": 2478
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1227730819170349e-05,
+ "loss": 0.8669,
+ "step": 2479
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1221545646062431e-05,
+ "loss": 0.8553,
+ "step": 2480
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.121535999852026e-05,
+ "loss": 0.9064,
+ "step": 2481
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1209173878946271e-05,
+ "loss": 0.8003,
+ "step": 2482
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1202987289743078e-05,
+ "loss": 0.7714,
+ "step": 2483
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1196800233313488e-05,
+ "loss": 0.8098,
+ "step": 2484
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1190612712060475e-05,
+ "loss": 0.9308,
+ "step": 2485
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1184424728387204e-05,
+ "loss": 0.8383,
+ "step": 2486
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1178236284697017e-05,
+ "loss": 0.9422,
+ "step": 2487
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1172047383393434e-05,
+ "loss": 0.9478,
+ "step": 2488
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1165858026880151e-05,
+ "loss": 0.8395,
+ "step": 2489
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1159668217561048e-05,
+ "loss": 0.883,
+ "step": 2490
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.115347795784017e-05,
+ "loss": 0.8678,
+ "step": 2491
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1147287250121745e-05,
+ "loss": 0.8559,
+ "step": 2492
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1141096096810174e-05,
+ "loss": 0.8701,
+ "step": 2493
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1134904500310029e-05,
+ "loss": 0.9352,
+ "step": 2494
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1128712463026048e-05,
+ "loss": 0.9426,
+ "step": 2495
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1122519987363156e-05,
+ "loss": 0.89,
+ "step": 2496
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1116327075726436e-05,
+ "loss": 0.8317,
+ "step": 2497
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1110133730521142e-05,
+ "loss": 0.914,
+ "step": 2498
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.11039399541527e-05,
+ "loss": 0.8242,
+ "step": 2499
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.10977457490267e-05,
+ "loss": 0.8806,
+ "step": 2500
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.10915511175489e-05,
+ "loss": 0.8181,
+ "step": 2501
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1085356062125225e-05,
+ "loss": 0.8941,
+ "step": 2502
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1079160585161759e-05,
+ "loss": 0.7827,
+ "step": 2503
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.107296468906476e-05,
+ "loss": 0.8903,
+ "step": 2504
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.106676837624064e-05,
+ "loss": 0.9281,
+ "step": 2505
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1060571649095972e-05,
+ "loss": 0.3481,
+ "step": 2506
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.10543745100375e-05,
+ "loss": 0.8976,
+ "step": 2507
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1048176961472114e-05,
+ "loss": 0.8353,
+ "step": 2508
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1041979005806876e-05,
+ "loss": 0.9495,
+ "step": 2509
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1035780645449001e-05,
+ "loss": 0.8773,
+ "step": 2510
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1029581882805857e-05,
+ "loss": 0.9464,
+ "step": 2511
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1023382720284973e-05,
+ "loss": 0.9351,
+ "step": 2512
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1017183160294033e-05,
+ "loss": 0.9049,
+ "step": 2513
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1010983205240878e-05,
+ "loss": 0.8924,
+ "step": 2514
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1004782857533488e-05,
+ "loss": 0.9106,
+ "step": 2515
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.099858211958002e-05,
+ "loss": 0.8567,
+ "step": 2516
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0992380993788763e-05,
+ "loss": 0.82,
+ "step": 2517
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0986179482568162e-05,
+ "loss": 0.8759,
+ "step": 2518
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0979977588326815e-05,
+ "loss": 0.9139,
+ "step": 2519
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0973775313473465e-05,
+ "loss": 0.8956,
+ "step": 2520
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0967572660417001e-05,
+ "loss": 0.7797,
+ "step": 2521
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0961369631566468e-05,
+ "loss": 0.8928,
+ "step": 2522
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0955166229331048e-05,
+ "loss": 0.8539,
+ "step": 2523
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0948962456120068e-05,
+ "loss": 0.8613,
+ "step": 2524
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0942758314343007e-05,
+ "loss": 0.8727,
+ "step": 2525
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0936553806409482e-05,
+ "loss": 0.9138,
+ "step": 2526
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0930348934729249e-05,
+ "loss": 0.9008,
+ "step": 2527
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0924143701712211e-05,
+ "loss": 0.9609,
+ "step": 2528
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0917938109768404e-05,
+ "loss": 0.9213,
+ "step": 2529
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0911732161308014e-05,
+ "loss": 0.8708,
+ "step": 2530
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0905525858741364e-05,
+ "loss": 0.9063,
+ "step": 2531
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.08993192044789e-05,
+ "loss": 0.8935,
+ "step": 2532
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.089311220093122e-05,
+ "loss": 0.8919,
+ "step": 2533
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0886904850509052e-05,
+ "loss": 0.9275,
+ "step": 2534
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0880697155623264e-05,
+ "loss": 0.8895,
+ "step": 2535
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0874489118684846e-05,
+ "loss": 0.84,
+ "step": 2536
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.086828074210493e-05,
+ "loss": 0.8531,
+ "step": 2537
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0862072028294777e-05,
+ "loss": 0.8842,
+ "step": 2538
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0855862979665788e-05,
+ "loss": 0.8564,
+ "step": 2539
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0849653598629477e-05,
+ "loss": 0.8437,
+ "step": 2540
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0843443887597495e-05,
+ "loss": 0.9083,
+ "step": 2541
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0837233848981632e-05,
+ "loss": 0.8496,
+ "step": 2542
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0831023485193787e-05,
+ "loss": 0.876,
+ "step": 2543
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0824812798645997e-05,
+ "loss": 0.8915,
+ "step": 2544
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0818601791750418e-05,
+ "loss": 0.8835,
+ "step": 2545
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0812390466919337e-05,
+ "loss": 0.894,
+ "step": 2546
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0806178826565162e-05,
+ "loss": 0.9062,
+ "step": 2547
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0799966873100419e-05,
+ "loss": 0.9062,
+ "step": 2548
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0793754608937758e-05,
+ "loss": 0.843,
+ "step": 2549
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0787542036489955e-05,
+ "loss": 0.8888,
+ "step": 2550
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0781329158169902e-05,
+ "loss": 0.8704,
+ "step": 2551
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0775115976390607e-05,
+ "loss": 0.9119,
+ "step": 2552
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0768902493565197e-05,
+ "loss": 0.7724,
+ "step": 2553
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0762688712106918e-05,
+ "loss": 0.9035,
+ "step": 2554
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0756474634429133e-05,
+ "loss": 0.9005,
+ "step": 2555
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0750260262945314e-05,
+ "loss": 0.8955,
+ "step": 2556
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0744045600069055e-05,
+ "loss": 0.3451,
+ "step": 2557
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0737830648214063e-05,
+ "loss": 0.8785,
+ "step": 2558
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0731615409794144e-05,
+ "loss": 0.8356,
+ "step": 2559
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0725399887223234e-05,
+ "loss": 0.8944,
+ "step": 2560
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0719184082915364e-05,
+ "loss": 0.8373,
+ "step": 2561
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0712967999284682e-05,
+ "loss": 0.8753,
+ "step": 2562
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0706751638745448e-05,
+ "loss": 0.7854,
+ "step": 2563
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0700535003712023e-05,
+ "loss": 0.9136,
+ "step": 2564
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.069431809659887e-05,
+ "loss": 0.8197,
+ "step": 2565
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.068810091982057e-05,
+ "loss": 0.8508,
+ "step": 2566
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0681883475791803e-05,
+ "loss": 0.9265,
+ "step": 2567
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.067566576692735e-05,
+ "loss": 0.8755,
+ "step": 2568
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0669447795642103e-05,
+ "loss": 0.9198,
+ "step": 2569
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.066322956435104e-05,
+ "loss": 0.8855,
+ "step": 2570
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.065701107546926e-05,
+ "loss": 0.8474,
+ "step": 2571
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.065079233141195e-05,
+ "loss": 0.8718,
+ "step": 2572
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0644573334594395e-05,
+ "loss": 0.8611,
+ "step": 2573
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0638354087431986e-05,
+ "loss": 0.8439,
+ "step": 2574
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0632134592340204e-05,
+ "loss": 0.8367,
+ "step": 2575
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0625914851734632e-05,
+ "loss": 0.852,
+ "step": 2576
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0619694868030943e-05,
+ "loss": 0.92,
+ "step": 2577
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0613474643644907e-05,
+ "loss": 0.8508,
+ "step": 2578
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0607254180992391e-05,
+ "loss": 0.8605,
+ "step": 2579
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0601033482489346e-05,
+ "loss": 0.8165,
+ "step": 2580
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0594812550551826e-05,
+ "loss": 0.8475,
+ "step": 2581
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.058859138759596e-05,
+ "loss": 0.8574,
+ "step": 2582
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0582369996037985e-05,
+ "loss": 0.8432,
+ "step": 2583
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0576148378294213e-05,
+ "loss": 0.9228,
+ "step": 2584
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.056992653678105e-05,
+ "loss": 0.8932,
+ "step": 2585
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0563704473914986e-05,
+ "loss": 0.8628,
+ "step": 2586
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0557482192112603e-05,
+ "loss": 0.8452,
+ "step": 2587
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0551259693790556e-05,
+ "loss": 0.8891,
+ "step": 2588
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0545036981365601e-05,
+ "loss": 0.8311,
+ "step": 2589
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.053881405725456e-05,
+ "loss": 0.8099,
+ "step": 2590
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0532590923874349e-05,
+ "loss": 0.8757,
+ "step": 2591
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0526367583641958e-05,
+ "loss": 0.9395,
+ "step": 2592
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0520144038974468e-05,
+ "loss": 0.8493,
+ "step": 2593
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0513920292289021e-05,
+ "loss": 0.9005,
+ "step": 2594
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0507696346002857e-05,
+ "loss": 0.8898,
+ "step": 2595
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0501472202533285e-05,
+ "loss": 0.9562,
+ "step": 2596
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0495247864297684e-05,
+ "loss": 0.7879,
+ "step": 2597
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0489023333713522e-05,
+ "loss": 0.8574,
+ "step": 2598
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0482798613198328e-05,
+ "loss": 0.8309,
+ "step": 2599
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.047657370516972e-05,
+ "loss": 0.8986,
+ "step": 2600
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0470348612045376e-05,
+ "loss": 0.8437,
+ "step": 2601
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0464123336243049e-05,
+ "loss": 0.3316,
+ "step": 2602
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0457897880180566e-05,
+ "loss": 0.9496,
+ "step": 2603
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0451672246275826e-05,
+ "loss": 0.8466,
+ "step": 2604
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0445446436946788e-05,
+ "loss": 0.8402,
+ "step": 2605
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0439220454611486e-05,
+ "loss": 0.8794,
+ "step": 2606
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0432994301688021e-05,
+ "loss": 0.8938,
+ "step": 2607
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0426767980594559e-05,
+ "loss": 0.8828,
+ "step": 2608
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0420541493749332e-05,
+ "loss": 0.9552,
+ "step": 2609
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0414314843570634e-05,
+ "loss": 0.7741,
+ "step": 2610
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0408088032476822e-05,
+ "loss": 0.8506,
+ "step": 2611
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0401861062886324e-05,
+ "loss": 0.8795,
+ "step": 2612
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0395633937217622e-05,
+ "loss": 0.8442,
+ "step": 2613
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0389406657889254e-05,
+ "loss": 0.8694,
+ "step": 2614
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0383179227319826e-05,
+ "loss": 0.3218,
+ "step": 2615
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0376951647928007e-05,
+ "loss": 0.9393,
+ "step": 2616
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0370723922132506e-05,
+ "loss": 0.827,
+ "step": 2617
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.036449605235211e-05,
+ "loss": 0.9167,
+ "step": 2618
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0358268041005644e-05,
+ "loss": 0.9681,
+ "step": 2619
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0352039890511997e-05,
+ "loss": 0.8009,
+ "step": 2620
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.034581160329012e-05,
+ "loss": 0.8019,
+ "step": 2621
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0339583181758997e-05,
+ "loss": 0.8748,
+ "step": 2622
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.033335462833768e-05,
+ "loss": 0.8858,
+ "step": 2623
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0327125945445265e-05,
+ "loss": 0.8629,
+ "step": 2624
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0320897135500904e-05,
+ "loss": 0.8512,
+ "step": 2625
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0314668200923791e-05,
+ "loss": 0.8555,
+ "step": 2626
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0308439144133177e-05,
+ "loss": 0.3383,
+ "step": 2627
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0302209967548354e-05,
+ "loss": 0.833,
+ "step": 2628
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.029598067358866e-05,
+ "loss": 0.8692,
+ "step": 2629
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0289751264673485e-05,
+ "loss": 0.2985,
+ "step": 2630
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0283521743222256e-05,
+ "loss": 0.8183,
+ "step": 2631
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0277292111654447e-05,
+ "loss": 0.9011,
+ "step": 2632
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0271062372389582e-05,
+ "loss": 0.8701,
+ "step": 2633
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0264832527847212e-05,
+ "loss": 0.8217,
+ "step": 2634
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0258602580446941e-05,
+ "loss": 0.8651,
+ "step": 2635
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0252372532608405e-05,
+ "loss": 0.8406,
+ "step": 2636
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.024614238675129e-05,
+ "loss": 0.9297,
+ "step": 2637
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0239912145295303e-05,
+ "loss": 0.8563,
+ "step": 2638
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0233681810660207e-05,
+ "loss": 0.8697,
+ "step": 2639
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0227451385265788e-05,
+ "loss": 0.806,
+ "step": 2640
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.022122087153187e-05,
+ "loss": 0.9589,
+ "step": 2641
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0214990271878319e-05,
+ "loss": 0.8727,
+ "step": 2642
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0208759588725016e-05,
+ "loss": 0.9049,
+ "step": 2643
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0202528824491899e-05,
+ "loss": 0.8293,
+ "step": 2644
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0196297981598921e-05,
+ "loss": 0.8461,
+ "step": 2645
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.019006706246607e-05,
+ "loss": 0.8191,
+ "step": 2646
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.018383606951336e-05,
+ "loss": 0.8551,
+ "step": 2647
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0177605005160837e-05,
+ "loss": 0.8259,
+ "step": 2648
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0171373871828578e-05,
+ "loss": 0.8619,
+ "step": 2649
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0165142671936685e-05,
+ "loss": 0.8181,
+ "step": 2650
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0158911407905279e-05,
+ "loss": 0.8691,
+ "step": 2651
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0152680082154514e-05,
+ "loss": 0.8625,
+ "step": 2652
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0146448697104561e-05,
+ "loss": 0.8801,
+ "step": 2653
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0140217255175626e-05,
+ "loss": 0.8809,
+ "step": 2654
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.013398575878792e-05,
+ "loss": 0.3107,
+ "step": 2655
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0127754210361694e-05,
+ "loss": 0.8273,
+ "step": 2656
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0121522612317204e-05,
+ "loss": 0.912,
+ "step": 2657
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.011529096707473e-05,
+ "loss": 0.8444,
+ "step": 2658
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0109059277054574e-05,
+ "loss": 0.3209,
+ "step": 2659
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.010282754467705e-05,
+ "loss": 0.9094,
+ "step": 2660
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0096595772362492e-05,
+ "loss": 0.8436,
+ "step": 2661
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0090363962531251e-05,
+ "loss": 0.3562,
+ "step": 2662
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0084132117603689e-05,
+ "loss": 0.852,
+ "step": 2663
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0077900240000181e-05,
+ "loss": 0.7994,
+ "step": 2664
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0071668332141115e-05,
+ "loss": 0.8901,
+ "step": 2665
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0065436396446899e-05,
+ "loss": 0.8518,
+ "step": 2666
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0059204435337938e-05,
+ "loss": 0.9159,
+ "step": 2667
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0052972451234656e-05,
+ "loss": 0.8454,
+ "step": 2668
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0046740446557485e-05,
+ "loss": 0.8894,
+ "step": 2669
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0040508423726865e-05,
+ "loss": 0.8484,
+ "step": 2670
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0034276385163238e-05,
+ "loss": 0.8848,
+ "step": 2671
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0028044333287056e-05,
+ "loss": 0.3464,
+ "step": 2672
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.002181227051878e-05,
+ "loss": 0.8401,
+ "step": 2673
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0015580199278873e-05,
+ "loss": 0.8855,
+ "step": 2674
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0009348121987795e-05,
+ "loss": 0.8782,
+ "step": 2675
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.000311604106601e-05,
+ "loss": 0.9418,
+ "step": 2676
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.996883958933993e-06,
+ "loss": 0.8429,
+ "step": 2677
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.99065187801221e-06,
+ "loss": 0.8959,
+ "step": 2678
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.984419800721132e-06,
+ "loss": 0.8356,
+ "step": 2679
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.978187729481218e-06,
+ "loss": 0.8438,
+ "step": 2680
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.971955666712945e-06,
+ "loss": 0.9788,
+ "step": 2681
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.965723614836764e-06,
+ "loss": 0.812,
+ "step": 2682
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.959491576273139e-06,
+ "loss": 0.9521,
+ "step": 2683
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.95325955344252e-06,
+ "loss": 0.8228,
+ "step": 2684
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.947027548765347e-06,
+ "loss": 0.8729,
+ "step": 2685
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.940795564662064e-06,
+ "loss": 0.9111,
+ "step": 2686
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.934563603553103e-06,
+ "loss": 0.847,
+ "step": 2687
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.928331667858886e-06,
+ "loss": 0.8866,
+ "step": 2688
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.922099759999822e-06,
+ "loss": 0.8624,
+ "step": 2689
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.915867882396314e-06,
+ "loss": 0.9263,
+ "step": 2690
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.909636037468754e-06,
+ "loss": 0.8387,
+ "step": 2691
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.90340422763751e-06,
+ "loss": 0.8609,
+ "step": 2692
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.897172455322953e-06,
+ "loss": 0.8881,
+ "step": 2693
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.890940722945429e-06,
+ "loss": 0.8277,
+ "step": 2694
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.884709032925274e-06,
+ "loss": 0.8753,
+ "step": 2695
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.878477387682801e-06,
+ "loss": 0.8395,
+ "step": 2696
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.872245789638308e-06,
+ "loss": 0.8026,
+ "step": 2697
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.866014241212078e-06,
+ "loss": 0.882,
+ "step": 2698
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.859782744824376e-06,
+ "loss": 0.8545,
+ "step": 2699
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.85355130289544e-06,
+ "loss": 0.9182,
+ "step": 2700
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.84731991784549e-06,
+ "loss": 0.8645,
+ "step": 2701
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.841088592094726e-06,
+ "loss": 0.8556,
+ "step": 2702
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.834857328063316e-06,
+ "loss": 0.8338,
+ "step": 2703
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.828626128171422e-06,
+ "loss": 0.8932,
+ "step": 2704
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.822394994839164e-06,
+ "loss": 0.8671,
+ "step": 2705
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.816163930486643e-06,
+ "loss": 0.881,
+ "step": 2706
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.809932937533935e-06,
+ "loss": 0.9008,
+ "step": 2707
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.803702018401084e-06,
+ "loss": 0.8434,
+ "step": 2708
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.797471175508101e-06,
+ "loss": 0.8887,
+ "step": 2709
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.791240411274982e-06,
+ "loss": 0.8327,
+ "step": 2710
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.785009728121686e-06,
+ "loss": 0.8942,
+ "step": 2711
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.778779128468133e-06,
+ "loss": 0.869,
+ "step": 2712
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.772548614734217e-06,
+ "loss": 0.8334,
+ "step": 2713
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.766318189339798e-06,
+ "loss": 0.8665,
+ "step": 2714
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.760087854704697e-06,
+ "loss": 0.8676,
+ "step": 2715
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.753857613248714e-06,
+ "loss": 0.8543,
+ "step": 2716
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.747627467391596e-06,
+ "loss": 0.8593,
+ "step": 2717
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.741397419553062e-06,
+ "loss": 0.8325,
+ "step": 2718
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.735167472152793e-06,
+ "loss": 0.8437,
+ "step": 2719
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.728937627610425e-06,
+ "loss": 0.8573,
+ "step": 2720
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.722707888345553e-06,
+ "loss": 0.9113,
+ "step": 2721
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.716478256777749e-06,
+ "loss": 0.8483,
+ "step": 2722
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.710248735326519e-06,
+ "loss": 0.8847,
+ "step": 2723
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.704019326411344e-06,
+ "loss": 0.8559,
+ "step": 2724
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.697790032451651e-06,
+ "loss": 0.8365,
+ "step": 2725
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.691560855866826e-06,
+ "loss": 0.8783,
+ "step": 2726
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.685331799076208e-06,
+ "loss": 0.9162,
+ "step": 2727
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.6791028644991e-06,
+ "loss": 0.827,
+ "step": 2728
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.672874054554738e-06,
+ "loss": 0.9363,
+ "step": 2729
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.666645371662324e-06,
+ "loss": 0.8583,
+ "step": 2730
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.660416818241007e-06,
+ "loss": 0.8748,
+ "step": 2731
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.654188396709882e-06,
+ "loss": 0.8848,
+ "step": 2732
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.647960109488003e-06,
+ "loss": 0.8939,
+ "step": 2733
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.64173195899436e-06,
+ "loss": 0.8642,
+ "step": 2734
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.635503947647894e-06,
+ "loss": 0.8255,
+ "step": 2735
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.629276077867497e-06,
+ "loss": 0.8527,
+ "step": 2736
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.623048352071998e-06,
+ "loss": 0.8222,
+ "step": 2737
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.616820772680174e-06,
+ "loss": 0.8606,
+ "step": 2738
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.610593342110746e-06,
+ "loss": 0.8496,
+ "step": 2739
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.604366062782381e-06,
+ "loss": 0.8456,
+ "step": 2740
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.598138937113677e-06,
+ "loss": 0.8517,
+ "step": 2741
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.59191196752318e-06,
+ "loss": 0.3751,
+ "step": 2742
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.58568515642937e-06,
+ "loss": 0.845,
+ "step": 2743
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.579458506250668e-06,
+ "loss": 0.9585,
+ "step": 2744
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.573232019405441e-06,
+ "loss": 0.8443,
+ "step": 2745
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.567005698311982e-06,
+ "loss": 0.8783,
+ "step": 2746
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.560779545388517e-06,
+ "loss": 0.8972,
+ "step": 2747
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.554553563053217e-06,
+ "loss": 0.8733,
+ "step": 2748
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.548327753724181e-06,
+ "loss": 0.9073,
+ "step": 2749
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.542102119819436e-06,
+ "loss": 0.8283,
+ "step": 2750
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.535876663756955e-06,
+ "loss": 0.8966,
+ "step": 2751
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.529651387954628e-06,
+ "loss": 0.8656,
+ "step": 2752
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.523426294830284e-06,
+ "loss": 0.817,
+ "step": 2753
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.517201386801675e-06,
+ "loss": 0.9141,
+ "step": 2754
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.510976666286484e-06,
+ "loss": 0.908,
+ "step": 2755
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.504752135702318e-06,
+ "loss": 0.9099,
+ "step": 2756
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.498527797466718e-06,
+ "loss": 0.8117,
+ "step": 2757
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.492303653997146e-06,
+ "loss": 0.8525,
+ "step": 2758
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.48607970771098e-06,
+ "loss": 0.8752,
+ "step": 2759
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.479855961025538e-06,
+ "loss": 0.8283,
+ "step": 2760
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.473632416358045e-06,
+ "loss": 0.3365,
+ "step": 2761
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.467409076125653e-06,
+ "loss": 0.8983,
+ "step": 2762
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.461185942745443e-06,
+ "loss": 0.9038,
+ "step": 2763
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.454963018634402e-06,
+ "loss": 0.8155,
+ "step": 2764
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.448740306209447e-06,
+ "loss": 0.86,
+ "step": 2765
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.442517807887402e-06,
+ "loss": 0.8279,
+ "step": 2766
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.436295526085016e-06,
+ "loss": 0.9015,
+ "step": 2767
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.430073463218952e-06,
+ "loss": 0.8779,
+ "step": 2768
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.423851621705789e-06,
+ "loss": 0.9048,
+ "step": 2769
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.41763000396202e-06,
+ "loss": 0.8563,
+ "step": 2770
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.411408612404043e-06,
+ "loss": 0.8487,
+ "step": 2771
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.40518744944818e-06,
+ "loss": 0.8595,
+ "step": 2772
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.398966517510654e-06,
+ "loss": 0.9181,
+ "step": 2773
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.39274581900761e-06,
+ "loss": 0.9262,
+ "step": 2774
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.386525356355095e-06,
+ "loss": 0.3415,
+ "step": 2775
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.380305131969059e-06,
+ "loss": 0.8506,
+ "step": 2776
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.374085148265372e-06,
+ "loss": 0.833,
+ "step": 2777
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.3678654076598e-06,
+ "loss": 0.8932,
+ "step": 2778
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.361645912568015e-06,
+ "loss": 0.8792,
+ "step": 2779
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.355426665405607e-06,
+ "loss": 0.934,
+ "step": 2780
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.349207668588053e-06,
+ "loss": 0.851,
+ "step": 2781
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.342988924530742e-06,
+ "loss": 0.8563,
+ "step": 2782
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.336770435648963e-06,
+ "loss": 0.8484,
+ "step": 2783
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.330552204357904e-06,
+ "loss": 0.8848,
+ "step": 2784
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.32433423307265e-06,
+ "loss": 0.872,
+ "step": 2785
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.318116524208198e-06,
+ "loss": 0.8068,
+ "step": 2786
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.311899080179433e-06,
+ "loss": 0.8732,
+ "step": 2787
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.305681903401133e-06,
+ "loss": 0.902,
+ "step": 2788
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.299464996287984e-06,
+ "loss": 0.8916,
+ "step": 2789
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.293248361254557e-06,
+ "loss": 0.8251,
+ "step": 2790
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.287032000715318e-06,
+ "loss": 0.8698,
+ "step": 2791
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.28081591708464e-06,
+ "loss": 0.8647,
+ "step": 2792
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.27460011277677e-06,
+ "loss": 0.8747,
+ "step": 2793
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.268384590205858e-06,
+ "loss": 0.8719,
+ "step": 2794
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.262169351785944e-06,
+ "loss": 0.8338,
+ "step": 2795
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.255954399930948e-06,
+ "loss": 0.8776,
+ "step": 2796
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.249739737054686e-06,
+ "loss": 0.8729,
+ "step": 2797
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.24352536557087e-06,
+ "loss": 0.8764,
+ "step": 2798
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.237311287893086e-06,
+ "loss": 0.813,
+ "step": 2799
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.231097506434808e-06,
+ "loss": 0.8445,
+ "step": 2800
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.224884023609398e-06,
+ "loss": 0.8937,
+ "step": 2801
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.218670841830098e-06,
+ "loss": 0.8662,
+ "step": 2802
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.212457963510045e-06,
+ "loss": 0.8182,
+ "step": 2803
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.206245391062243e-06,
+ "loss": 0.8764,
+ "step": 2804
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.200033126899585e-06,
+ "loss": 0.8467,
+ "step": 2805
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.193821173434843e-06,
+ "loss": 0.331,
+ "step": 2806
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.187609533080668e-06,
+ "loss": 0.828,
+ "step": 2807
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.181398208249583e-06,
+ "loss": 0.8566,
+ "step": 2808
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.175187201354005e-06,
+ "loss": 0.9304,
+ "step": 2809
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.168976514806216e-06,
+ "loss": 0.8846,
+ "step": 2810
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.162766151018372e-06,
+ "loss": 0.8465,
+ "step": 2811
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.156556112402508e-06,
+ "loss": 0.8302,
+ "step": 2812
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.150346401370528e-06,
+ "loss": 0.8129,
+ "step": 2813
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.144137020334214e-06,
+ "loss": 0.8516,
+ "step": 2814
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.137927971705223e-06,
+ "loss": 0.9398,
+ "step": 2815
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.131719257895074e-06,
+ "loss": 0.8873,
+ "step": 2816
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.125510881315159e-06,
+ "loss": 0.8755,
+ "step": 2817
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.119302844376741e-06,
+ "loss": 0.8555,
+ "step": 2818
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.113095149490951e-06,
+ "loss": 0.8529,
+ "step": 2819
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.106887799068782e-06,
+ "loss": 0.9058,
+ "step": 2820
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.100680795521104e-06,
+ "loss": 0.9051,
+ "step": 2821
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.09447414125864e-06,
+ "loss": 0.9627,
+ "step": 2822
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.088267838691987e-06,
+ "loss": 0.9022,
+ "step": 2823
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.0820618902316e-06,
+ "loss": 0.9208,
+ "step": 2824
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.075856298287796e-06,
+ "loss": 0.3172,
+ "step": 2825
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.069651065270753e-06,
+ "loss": 0.8085,
+ "step": 2826
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.06344619359052e-06,
+ "loss": 0.7775,
+ "step": 2827
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.057241685656995e-06,
+ "loss": 0.8915,
+ "step": 2828
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.051037543879933e-06,
+ "loss": 0.782,
+ "step": 2829
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.044833770668957e-06,
+ "loss": 0.8743,
+ "step": 2830
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.038630368433537e-06,
+ "loss": 0.8376,
+ "step": 2831
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.032427339583e-06,
+ "loss": 0.8785,
+ "step": 2832
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.026224686526539e-06,
+ "loss": 0.9186,
+ "step": 2833
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.020022411673186e-06,
+ "loss": 0.8367,
+ "step": 2834
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.013820517431841e-06,
+ "loss": 0.8397,
+ "step": 2835
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.00761900621124e-06,
+ "loss": 0.88,
+ "step": 2836
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.00141788041998e-06,
+ "loss": 0.8992,
+ "step": 2837
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.99521714246651e-06,
+ "loss": 0.8569,
+ "step": 2838
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.989016794759127e-06,
+ "loss": 0.8422,
+ "step": 2839
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.98281683970597e-06,
+ "loss": 0.8888,
+ "step": 2840
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.97661727971503e-06,
+ "loss": 0.935,
+ "step": 2841
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.970418117194146e-06,
+ "loss": 0.9008,
+ "step": 2842
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.964219354550999e-06,
+ "loss": 0.8679,
+ "step": 2843
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.958020994193124e-06,
+ "loss": 0.8332,
+ "step": 2844
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.951823038527887e-06,
+ "loss": 0.871,
+ "step": 2845
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.945625489962503e-06,
+ "loss": 0.8973,
+ "step": 2846
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.93942835090403e-06,
+ "loss": 0.8367,
+ "step": 2847
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.933231623759365e-06,
+ "loss": 0.8643,
+ "step": 2848
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.927035310935241e-06,
+ "loss": 0.8812,
+ "step": 2849
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.920839414838243e-06,
+ "loss": 0.8782,
+ "step": 2850
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.914643937874778e-06,
+ "loss": 0.866,
+ "step": 2851
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.908448882451104e-06,
+ "loss": 0.8704,
+ "step": 2852
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.902254250973306e-06,
+ "loss": 0.8716,
+ "step": 2853
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.896060045847305e-06,
+ "loss": 0.8317,
+ "step": 2854
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.88986626947886e-06,
+ "loss": 0.8246,
+ "step": 2855
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.883672924273566e-06,
+ "loss": 0.8153,
+ "step": 2856
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.877480012636847e-06,
+ "loss": 0.9074,
+ "step": 2857
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.871287536973954e-06,
+ "loss": 0.8408,
+ "step": 2858
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.865095499689978e-06,
+ "loss": 0.3436,
+ "step": 2859
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.85890390318983e-06,
+ "loss": 0.9315,
+ "step": 2860
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.852712749878255e-06,
+ "loss": 0.8388,
+ "step": 2861
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.846522042159833e-06,
+ "loss": 0.8739,
+ "step": 2862
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.840331782438954e-06,
+ "loss": 0.8447,
+ "step": 2863
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.83414197311985e-06,
+ "loss": 0.8676,
+ "step": 2864
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.82795261660657e-06,
+ "loss": 0.8118,
+ "step": 2865
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.821763715302986e-06,
+ "loss": 0.8921,
+ "step": 2866
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.815575271612798e-06,
+ "loss": 0.8897,
+ "step": 2867
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.809387287939528e-06,
+ "loss": 0.8711,
+ "step": 2868
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.803199766686517e-06,
+ "loss": 0.8597,
+ "step": 2869
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.797012710256923e-06,
+ "loss": 0.8741,
+ "step": 2870
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.790826121053732e-06,
+ "loss": 0.8237,
+ "step": 2871
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.784640001479741e-06,
+ "loss": 0.8833,
+ "step": 2872
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.77845435393757e-06,
+ "loss": 0.8616,
+ "step": 2873
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.772269180829653e-06,
+ "loss": 0.8781,
+ "step": 2874
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.766084484558237e-06,
+ "loss": 0.9086,
+ "step": 2875
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.759900267525393e-06,
+ "loss": 0.8667,
+ "step": 2876
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.753716532132992e-06,
+ "loss": 0.8897,
+ "step": 2877
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.747533280782725e-06,
+ "loss": 0.8443,
+ "step": 2878
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.741350515876103e-06,
+ "loss": 0.8672,
+ "step": 2879
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.73516823981444e-06,
+ "loss": 0.8794,
+ "step": 2880
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.728986454998858e-06,
+ "loss": 0.8522,
+ "step": 2881
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.72280516383029e-06,
+ "loss": 0.8251,
+ "step": 2882
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.716624368709477e-06,
+ "loss": 0.8877,
+ "step": 2883
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.71044407203697e-06,
+ "loss": 0.8925,
+ "step": 2884
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.70426427621313e-06,
+ "loss": 0.8259,
+ "step": 2885
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.698084983638111e-06,
+ "loss": 0.8513,
+ "step": 2886
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.691906196711884e-06,
+ "loss": 0.9092,
+ "step": 2887
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.685727917834218e-06,
+ "loss": 0.8844,
+ "step": 2888
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.679550149404685e-06,
+ "loss": 0.88,
+ "step": 2889
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.673372893822653e-06,
+ "loss": 0.856,
+ "step": 2890
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.667196153487308e-06,
+ "loss": 0.8433,
+ "step": 2891
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.661019930797615e-06,
+ "loss": 0.8484,
+ "step": 2892
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.654844228152355e-06,
+ "loss": 0.9087,
+ "step": 2893
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.648669047950097e-06,
+ "loss": 0.795,
+ "step": 2894
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.642494392589206e-06,
+ "loss": 0.8777,
+ "step": 2895
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.63632026446785e-06,
+ "loss": 0.8304,
+ "step": 2896
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.630146665983993e-06,
+ "loss": 0.8739,
+ "step": 2897
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.623973599535385e-06,
+ "loss": 0.8136,
+ "step": 2898
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.617801067519575e-06,
+ "loss": 0.8462,
+ "step": 2899
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.611629072333905e-06,
+ "loss": 0.8709,
+ "step": 2900
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.605457616375503e-06,
+ "loss": 0.8373,
+ "step": 2901
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.599286702041292e-06,
+ "loss": 0.8282,
+ "step": 2902
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.593116331727987e-06,
+ "loss": 0.8921,
+ "step": 2903
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.586946507832088e-06,
+ "loss": 0.3204,
+ "step": 2904
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.580777232749883e-06,
+ "loss": 0.8323,
+ "step": 2905
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.574608508877448e-06,
+ "loss": 0.8535,
+ "step": 2906
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.568440338610638e-06,
+ "loss": 0.8983,
+ "step": 2907
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.562272724345108e-06,
+ "loss": 0.891,
+ "step": 2908
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.556105668476287e-06,
+ "loss": 0.9547,
+ "step": 2909
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.549939173399385e-06,
+ "loss": 0.8641,
+ "step": 2910
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.5437732415094e-06,
+ "loss": 0.9178,
+ "step": 2911
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.537607875201106e-06,
+ "loss": 0.9223,
+ "step": 2912
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.531443076869058e-06,
+ "loss": 0.886,
+ "step": 2913
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.525278848907603e-06,
+ "loss": 0.8435,
+ "step": 2914
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.51911519371085e-06,
+ "loss": 0.8763,
+ "step": 2915
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.512952113672689e-06,
+ "loss": 0.8662,
+ "step": 2916
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.506789611186794e-06,
+ "loss": 0.8553,
+ "step": 2917
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.500627688646607e-06,
+ "loss": 0.8467,
+ "step": 2918
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.494466348445345e-06,
+ "loss": 0.8956,
+ "step": 2919
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.48830559297601e-06,
+ "loss": 0.9071,
+ "step": 2920
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.48214542463136e-06,
+ "loss": 0.8047,
+ "step": 2921
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.475985845803938e-06,
+ "loss": 0.8647,
+ "step": 2922
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.469826858886054e-06,
+ "loss": 0.8714,
+ "step": 2923
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.463668466269785e-06,
+ "loss": 0.9193,
+ "step": 2924
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.457510670346976e-06,
+ "loss": 0.8771,
+ "step": 2925
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.451353473509254e-06,
+ "loss": 0.8763,
+ "step": 2926
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.445196878147997e-06,
+ "loss": 0.8846,
+ "step": 2927
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.439040886654354e-06,
+ "loss": 0.8258,
+ "step": 2928
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.432885501419248e-06,
+ "loss": 0.8984,
+ "step": 2929
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.426730724833354e-06,
+ "loss": 0.848,
+ "step": 2930
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.420576559287112e-06,
+ "loss": 0.8857,
+ "step": 2931
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.414423007170742e-06,
+ "loss": 0.8956,
+ "step": 2932
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.408270070874201e-06,
+ "loss": 0.8615,
+ "step": 2933
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.402117752787225e-06,
+ "loss": 0.8421,
+ "step": 2934
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.395966055299302e-06,
+ "loss": 0.8771,
+ "step": 2935
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.389814980799679e-06,
+ "loss": 0.905,
+ "step": 2936
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.38366453167736e-06,
+ "loss": 0.8578,
+ "step": 2937
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.377514710321117e-06,
+ "loss": 0.8943,
+ "step": 2938
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.371365519119463e-06,
+ "loss": 0.9173,
+ "step": 2939
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.365216960460675e-06,
+ "loss": 0.8391,
+ "step": 2940
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.359069036732781e-06,
+ "loss": 0.8904,
+ "step": 2941
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.352921750323562e-06,
+ "loss": 0.9559,
+ "step": 2942
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.346775103620559e-06,
+ "loss": 0.7832,
+ "step": 2943
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.340629099011057e-06,
+ "loss": 0.8711,
+ "step": 2944
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.33448373888209e-06,
+ "loss": 0.33,
+ "step": 2945
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.328339025620449e-06,
+ "loss": 0.9021,
+ "step": 2946
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.322194961612668e-06,
+ "loss": 0.8918,
+ "step": 2947
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.316051549245026e-06,
+ "loss": 0.898,
+ "step": 2948
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.309908790903562e-06,
+ "loss": 0.8721,
+ "step": 2949
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.303766688974047e-06,
+ "loss": 0.922,
+ "step": 2950
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.297625245842006e-06,
+ "loss": 0.331,
+ "step": 2951
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.291484463892703e-06,
+ "loss": 0.8646,
+ "step": 2952
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.285344345511147e-06,
+ "loss": 0.8507,
+ "step": 2953
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.279204893082083e-06,
+ "loss": 0.898,
+ "step": 2954
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.273066108990017e-06,
+ "loss": 0.8155,
+ "step": 2955
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.266927995619175e-06,
+ "loss": 0.8892,
+ "step": 2956
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.260790555353526e-06,
+ "loss": 0.8829,
+ "step": 2957
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.254653790576787e-06,
+ "loss": 0.8767,
+ "step": 2958
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.248517703672405e-06,
+ "loss": 0.8603,
+ "step": 2959
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.242382297023558e-06,
+ "loss": 0.8926,
+ "step": 2960
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.23624757301318e-06,
+ "loss": 0.8079,
+ "step": 2961
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.230113534023917e-06,
+ "loss": 0.8841,
+ "step": 2962
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.223980182438167e-06,
+ "loss": 0.8636,
+ "step": 2963
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.217847520638049e-06,
+ "loss": 0.8365,
+ "step": 2964
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.211715551005414e-06,
+ "loss": 0.8443,
+ "step": 2965
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.205584275921854e-06,
+ "loss": 0.7492,
+ "step": 2966
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.199453697768686e-06,
+ "loss": 0.7804,
+ "step": 2967
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.193323818926955e-06,
+ "loss": 0.8401,
+ "step": 2968
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.187194641777432e-06,
+ "loss": 0.8763,
+ "step": 2969
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.181066168700622e-06,
+ "loss": 0.8813,
+ "step": 2970
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.174938402076754e-06,
+ "loss": 0.8827,
+ "step": 2971
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.168811344285776e-06,
+ "loss": 0.8392,
+ "step": 2972
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.162684997707374e-06,
+ "loss": 0.83,
+ "step": 2973
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.156559364720947e-06,
+ "loss": 0.831,
+ "step": 2974
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.150434447705623e-06,
+ "loss": 0.8507,
+ "step": 2975
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.144310249040246e-06,
+ "loss": 0.8886,
+ "step": 2976
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.138186771103382e-06,
+ "loss": 0.842,
+ "step": 2977
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.132064016273325e-06,
+ "loss": 0.8096,
+ "step": 2978
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.12594198692808e-06,
+ "loss": 0.8106,
+ "step": 2979
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.119820685445373e-06,
+ "loss": 0.8398,
+ "step": 2980
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.113700114202647e-06,
+ "loss": 0.8498,
+ "step": 2981
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.107580275577059e-06,
+ "loss": 0.8434,
+ "step": 2982
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.101461171945483e-06,
+ "loss": 0.8361,
+ "step": 2983
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.095342805684516e-06,
+ "loss": 0.8251,
+ "step": 2984
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.089225179170454e-06,
+ "loss": 0.834,
+ "step": 2985
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.083108294779313e-06,
+ "loss": 0.8794,
+ "step": 2986
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.076992154886826e-06,
+ "loss": 0.8278,
+ "step": 2987
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.070876761868426e-06,
+ "loss": 0.3506,
+ "step": 2988
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.064762118099258e-06,
+ "loss": 0.8355,
+ "step": 2989
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.058648225954188e-06,
+ "loss": 0.8349,
+ "step": 2990
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.052535087807774e-06,
+ "loss": 0.8381,
+ "step": 2991
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.046422706034294e-06,
+ "loss": 0.8499,
+ "step": 2992
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.040311083007725e-06,
+ "loss": 0.8846,
+ "step": 2993
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.034200221101746e-06,
+ "loss": 0.8161,
+ "step": 2994
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.028090122689747e-06,
+ "loss": 0.8195,
+ "step": 2995
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.021980790144828e-06,
+ "loss": 0.9054,
+ "step": 2996
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.015872225839776e-06,
+ "loss": 0.8296,
+ "step": 2997
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.009764432147086e-06,
+ "loss": 0.8689,
+ "step": 2998
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.003657411438961e-06,
+ "loss": 0.8763,
+ "step": 2999
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.997551166087293e-06,
+ "loss": 0.861,
+ "step": 3000
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.991445698463672e-06,
+ "loss": 0.8845,
+ "step": 3001
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.985341010939402e-06,
+ "loss": 0.8727,
+ "step": 3002
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.979237105885467e-06,
+ "loss": 0.8588,
+ "step": 3003
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.973133985672558e-06,
+ "loss": 0.8522,
+ "step": 3004
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.967031652671051e-06,
+ "loss": 0.8947,
+ "step": 3005
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.960930109251023e-06,
+ "loss": 0.8376,
+ "step": 3006
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.954829357782243e-06,
+ "loss": 0.3353,
+ "step": 3007
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.948729400634178e-06,
+ "loss": 0.8328,
+ "step": 3008
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.942630240175977e-06,
+ "loss": 0.8947,
+ "step": 3009
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.936531878776484e-06,
+ "loss": 0.8339,
+ "step": 3010
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.930434318804229e-06,
+ "loss": 0.7724,
+ "step": 3011
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.924337562627435e-06,
+ "loss": 0.8101,
+ "step": 3012
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.918241612614016e-06,
+ "loss": 0.9097,
+ "step": 3013
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.91214647113157e-06,
+ "loss": 0.8057,
+ "step": 3014
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.906052140547373e-06,
+ "loss": 0.8494,
+ "step": 3015
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.899958623228398e-06,
+ "loss": 0.8431,
+ "step": 3016
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.893865921541294e-06,
+ "loss": 0.8113,
+ "step": 3017
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.887774037852395e-06,
+ "loss": 0.8479,
+ "step": 3018
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.881682974527723e-06,
+ "loss": 0.8474,
+ "step": 3019
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.875592733932972e-06,
+ "loss": 0.8328,
+ "step": 3020
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.869503318433529e-06,
+ "loss": 0.8615,
+ "step": 3021
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.863414730394444e-06,
+ "loss": 0.869,
+ "step": 3022
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.857326972180455e-06,
+ "loss": 0.8617,
+ "step": 3023
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.85124004615598e-06,
+ "loss": 0.9011,
+ "step": 3024
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.845153954685114e-06,
+ "loss": 0.9741,
+ "step": 3025
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.839068700131623e-06,
+ "loss": 0.8368,
+ "step": 3026
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.832984284858946e-06,
+ "loss": 0.8543,
+ "step": 3027
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.826900711230204e-06,
+ "loss": 0.8758,
+ "step": 3028
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.820817981608185e-06,
+ "loss": 0.8248,
+ "step": 3029
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.814736098355348e-06,
+ "loss": 0.8318,
+ "step": 3030
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.808655063833832e-06,
+ "loss": 0.885,
+ "step": 3031
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.802574880405438e-06,
+ "loss": 0.8436,
+ "step": 3032
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.79649555043164e-06,
+ "loss": 0.8305,
+ "step": 3033
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.790417076273581e-06,
+ "loss": 0.8534,
+ "step": 3034
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.784339460292065e-06,
+ "loss": 0.334,
+ "step": 3035
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.77826270484757e-06,
+ "loss": 0.8972,
+ "step": 3036
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.772186812300244e-06,
+ "loss": 0.8626,
+ "step": 3037
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.766111785009888e-06,
+ "loss": 0.9063,
+ "step": 3038
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.760037625335973e-06,
+ "loss": 0.8684,
+ "step": 3039
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.753964335637634e-06,
+ "loss": 0.8742,
+ "step": 3040
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.747891918273668e-06,
+ "loss": 0.8185,
+ "step": 3041
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.741820375602524e-06,
+ "loss": 0.8636,
+ "step": 3042
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.735749709982329e-06,
+ "loss": 0.8815,
+ "step": 3043
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.729679923770855e-06,
+ "loss": 0.8931,
+ "step": 3044
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.723611019325538e-06,
+ "loss": 0.7918,
+ "step": 3045
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.71754299900347e-06,
+ "loss": 0.8457,
+ "step": 3046
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.7114758651614e-06,
+ "loss": 0.8989,
+ "step": 3047
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.705409620155733e-06,
+ "loss": 0.8857,
+ "step": 3048
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.699344266342529e-06,
+ "loss": 0.3279,
+ "step": 3049
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.693279806077504e-06,
+ "loss": 0.8684,
+ "step": 3050
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.68721624171602e-06,
+ "loss": 0.8412,
+ "step": 3051
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.681153575613098e-06,
+ "loss": 0.8156,
+ "step": 3052
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.675091810123404e-06,
+ "loss": 0.8172,
+ "step": 3053
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.669030947601265e-06,
+ "loss": 0.8674,
+ "step": 3054
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.662970990400647e-06,
+ "loss": 0.8687,
+ "step": 3055
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.656911940875163e-06,
+ "loss": 0.8184,
+ "step": 3056
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.650853801378084e-06,
+ "loss": 0.8339,
+ "step": 3057
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.644796574262322e-06,
+ "loss": 0.8015,
+ "step": 3058
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.638740261880423e-06,
+ "loss": 0.8864,
+ "step": 3059
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.632684866584606e-06,
+ "loss": 0.9279,
+ "step": 3060
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.626630390726704e-06,
+ "loss": 0.3349,
+ "step": 3061
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.620576836658212e-06,
+ "loss": 0.3398,
+ "step": 3062
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.61452420673026e-06,
+ "loss": 0.8459,
+ "step": 3063
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.608472503293615e-06,
+ "loss": 0.8781,
+ "step": 3064
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.60242172869869e-06,
+ "loss": 0.8574,
+ "step": 3065
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.596371885295542e-06,
+ "loss": 0.8851,
+ "step": 3066
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.590322975433857e-06,
+ "loss": 0.85,
+ "step": 3067
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.584275001462961e-06,
+ "loss": 0.8472,
+ "step": 3068
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.578227965731819e-06,
+ "loss": 0.869,
+ "step": 3069
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.572181870589028e-06,
+ "loss": 0.8681,
+ "step": 3070
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.566136718382821e-06,
+ "loss": 0.8866,
+ "step": 3071
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.560092511461069e-06,
+ "loss": 0.8623,
+ "step": 3072
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.55404925217127e-06,
+ "loss": 0.8846,
+ "step": 3073
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.548006942860557e-06,
+ "loss": 0.8178,
+ "step": 3074
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.541965585875695e-06,
+ "loss": 0.8941,
+ "step": 3075
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.535925183563073e-06,
+ "loss": 0.3444,
+ "step": 3076
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.529885738268714e-06,
+ "loss": 0.817,
+ "step": 3077
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.523847252338274e-06,
+ "loss": 0.8324,
+ "step": 3078
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.51780972811703e-06,
+ "loss": 0.8993,
+ "step": 3079
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.511773167949885e-06,
+ "loss": 0.9098,
+ "step": 3080
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.5057375741813685e-06,
+ "loss": 0.3501,
+ "step": 3081
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.499702949155634e-06,
+ "loss": 0.8686,
+ "step": 3082
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.493669295216467e-06,
+ "loss": 0.9073,
+ "step": 3083
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.487636614707265e-06,
+ "loss": 0.805,
+ "step": 3084
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.48160490997105e-06,
+ "loss": 0.8428,
+ "step": 3085
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.475574183350471e-06,
+ "loss": 0.8743,
+ "step": 3086
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.46954443718779e-06,
+ "loss": 0.9022,
+ "step": 3087
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.463515673824888e-06,
+ "loss": 0.3609,
+ "step": 3088
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.457487895603273e-06,
+ "loss": 0.3168,
+ "step": 3089
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.451461104864061e-06,
+ "loss": 0.9262,
+ "step": 3090
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.44543530394799e-06,
+ "loss": 0.3612,
+ "step": 3091
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.439410495195411e-06,
+ "loss": 0.8621,
+ "step": 3092
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.433386680946288e-06,
+ "loss": 0.8525,
+ "step": 3093
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.427363863540202e-06,
+ "loss": 0.8305,
+ "step": 3094
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.421342045316351e-06,
+ "loss": 0.87,
+ "step": 3095
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.415321228613534e-06,
+ "loss": 0.8576,
+ "step": 3096
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.409301415770168e-06,
+ "loss": 0.8915,
+ "step": 3097
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.403282609124281e-06,
+ "loss": 0.9088,
+ "step": 3098
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.397264811013507e-06,
+ "loss": 0.8795,
+ "step": 3099
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.391248023775084e-06,
+ "loss": 0.9255,
+ "step": 3100
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.385232249745873e-06,
+ "loss": 0.8656,
+ "step": 3101
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.379217491262325e-06,
+ "loss": 0.868,
+ "step": 3102
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.373203750660505e-06,
+ "loss": 0.8748,
+ "step": 3103
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.36719103027608e-06,
+ "loss": 0.9025,
+ "step": 3104
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.361179332444318e-06,
+ "loss": 0.8386,
+ "step": 3105
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.355168659500094e-06,
+ "loss": 0.9597,
+ "step": 3106
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.3491590137778915e-06,
+ "loss": 0.797,
+ "step": 3107
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.343150397611782e-06,
+ "loss": 0.9354,
+ "step": 3108
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.3371428133354435e-06,
+ "loss": 0.8112,
+ "step": 3109
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.33113626328215e-06,
+ "loss": 0.8546,
+ "step": 3110
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.325130749784781e-06,
+ "loss": 0.8515,
+ "step": 3111
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.3191262751758005e-06,
+ "loss": 0.8413,
+ "step": 3112
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.3131228417872905e-06,
+ "loss": 0.8536,
+ "step": 3113
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.307120451950902e-06,
+ "loss": 0.8237,
+ "step": 3114
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.301119107997905e-06,
+ "loss": 0.8614,
+ "step": 3115
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.295118812259145e-06,
+ "loss": 0.8469,
+ "step": 3116
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.289119567065068e-06,
+ "loss": 0.8548,
+ "step": 3117
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2831213747457155e-06,
+ "loss": 0.859,
+ "step": 3118
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2771242376307125e-06,
+ "loss": 0.8098,
+ "step": 3119
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.271128158049283e-06,
+ "loss": 0.9228,
+ "step": 3120
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2651331383302326e-06,
+ "loss": 0.8379,
+ "step": 3121
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2591391808019555e-06,
+ "loss": 0.9027,
+ "step": 3122
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.253146287792434e-06,
+ "loss": 0.844,
+ "step": 3123
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.247154461629248e-06,
+ "loss": 0.8871,
+ "step": 3124
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.241163704639547e-06,
+ "loss": 0.8171,
+ "step": 3125
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.235174019150071e-06,
+ "loss": 0.8791,
+ "step": 3126
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.229185407487149e-06,
+ "loss": 0.3224,
+ "step": 3127
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2231978719766884e-06,
+ "loss": 0.8307,
+ "step": 3128
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.217211414944171e-06,
+ "loss": 0.8704,
+ "step": 3129
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2112260387146784e-06,
+ "loss": 0.7989,
+ "step": 3130
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2052417456128565e-06,
+ "loss": 0.8623,
+ "step": 3131
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.199258537962936e-06,
+ "loss": 0.8888,
+ "step": 3132
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.193276418088729e-06,
+ "loss": 0.8629,
+ "step": 3133
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.187295388313618e-06,
+ "loss": 0.8836,
+ "step": 3134
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.181315450960562e-06,
+ "loss": 0.9025,
+ "step": 3135
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.175336608352113e-06,
+ "loss": 0.8653,
+ "step": 3136
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.169358862810374e-06,
+ "loss": 0.9051,
+ "step": 3137
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.163382216657033e-06,
+ "loss": 0.849,
+ "step": 3138
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.1574066722133565e-06,
+ "loss": 0.8447,
+ "step": 3139
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.151432231800173e-06,
+ "loss": 0.8223,
+ "step": 3140
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.145458897737882e-06,
+ "loss": 0.8434,
+ "step": 3141
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.139486672346466e-06,
+ "loss": 0.8406,
+ "step": 3142
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.133515557945463e-06,
+ "loss": 0.8718,
+ "step": 3143
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.12754555685399e-06,
+ "loss": 0.8422,
+ "step": 3144
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.121576671390722e-06,
+ "loss": 0.836,
+ "step": 3145
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.115608903873905e-06,
+ "loss": 0.8781,
+ "step": 3146
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.109642256621353e-06,
+ "loss": 0.928,
+ "step": 3147
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.103676731950443e-06,
+ "loss": 0.7649,
+ "step": 3148
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0977123321781176e-06,
+ "loss": 0.8946,
+ "step": 3149
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.091749059620881e-06,
+ "loss": 0.8437,
+ "step": 3150
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0857869165947945e-06,
+ "loss": 0.8807,
+ "step": 3151
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.079825905415491e-06,
+ "loss": 0.8756,
+ "step": 3152
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.073866028398153e-06,
+ "loss": 0.8937,
+ "step": 3153
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.067907287857535e-06,
+ "loss": 0.8357,
+ "step": 3154
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.061949686107938e-06,
+ "loss": 0.8342,
+ "step": 3155
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0559932254632315e-06,
+ "loss": 0.8986,
+ "step": 3156
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0500379082368305e-06,
+ "loss": 0.8572,
+ "step": 3157
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.044083736741711e-06,
+ "loss": 0.8588,
+ "step": 3158
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.03813071329041e-06,
+ "loss": 0.9037,
+ "step": 3159
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.032178840195009e-06,
+ "loss": 0.8254,
+ "step": 3160
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.026228119767149e-06,
+ "loss": 0.8,
+ "step": 3161
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.020278554318023e-06,
+ "loss": 0.8953,
+ "step": 3162
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.014330146158367e-06,
+ "loss": 0.8366,
+ "step": 3163
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.008382897598477e-06,
+ "loss": 0.3328,
+ "step": 3164
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.002436810948201e-06,
+ "loss": 0.8143,
+ "step": 3165
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.996491888516927e-06,
+ "loss": 0.9043,
+ "step": 3166
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.990548132613592e-06,
+ "loss": 0.8755,
+ "step": 3167
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.984605545546686e-06,
+ "loss": 0.3453,
+ "step": 3168
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.978664129624241e-06,
+ "loss": 0.8433,
+ "step": 3169
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.972723887153828e-06,
+ "loss": 0.8978,
+ "step": 3170
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9667848204425785e-06,
+ "loss": 0.8247,
+ "step": 3171
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.960846931797152e-06,
+ "loss": 0.8902,
+ "step": 3172
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9549102235237565e-06,
+ "loss": 0.3355,
+ "step": 3173
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.948974697928144e-06,
+ "loss": 0.8995,
+ "step": 3174
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.943040357315598e-06,
+ "loss": 0.7678,
+ "step": 3175
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9371072039909515e-06,
+ "loss": 0.9278,
+ "step": 3176
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.931175240258576e-06,
+ "loss": 0.8104,
+ "step": 3177
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9252444684223765e-06,
+ "loss": 0.7976,
+ "step": 3178
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.919314890785793e-06,
+ "loss": 0.8439,
+ "step": 3179
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.913386509651807e-06,
+ "loss": 0.7705,
+ "step": 3180
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.907459327322934e-06,
+ "loss": 0.8465,
+ "step": 3181
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.90153334610122e-06,
+ "loss": 0.8224,
+ "step": 3182
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.895608568288255e-06,
+ "loss": 0.8401,
+ "step": 3183
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.889684996185148e-06,
+ "loss": 0.8788,
+ "step": 3184
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.88376263209255e-06,
+ "loss": 0.888,
+ "step": 3185
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.877841478310639e-06,
+ "loss": 0.8864,
+ "step": 3186
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.871921537139117e-06,
+ "loss": 0.8261,
+ "step": 3187
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.866002810877224e-06,
+ "loss": 0.818,
+ "step": 3188
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.860085301823729e-06,
+ "loss": 0.874,
+ "step": 3189
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.854169012276923e-06,
+ "loss": 0.9251,
+ "step": 3190
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.848253944534622e-06,
+ "loss": 0.8128,
+ "step": 3191
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.84234010089417e-06,
+ "loss": 0.8548,
+ "step": 3192
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.836427483652436e-06,
+ "loss": 0.3153,
+ "step": 3193
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.830516095105817e-06,
+ "loss": 0.8652,
+ "step": 3194
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.824605937550224e-06,
+ "loss": 0.9272,
+ "step": 3195
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.818697013281093e-06,
+ "loss": 0.8982,
+ "step": 3196
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.8127893245933864e-06,
+ "loss": 0.8404,
+ "step": 3197
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.806882873781579e-06,
+ "loss": 0.87,
+ "step": 3198
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.800977663139666e-06,
+ "loss": 0.3493,
+ "step": 3199
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.795073694961171e-06,
+ "loss": 0.904,
+ "step": 3200
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.789170971539119e-06,
+ "loss": 0.8607,
+ "step": 3201
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.783269495166066e-06,
+ "loss": 0.8921,
+ "step": 3202
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.777369268134076e-06,
+ "loss": 0.8598,
+ "step": 3203
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.771470292734723e-06,
+ "loss": 0.8383,
+ "step": 3204
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.7655725712591055e-06,
+ "loss": 0.8726,
+ "step": 3205
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.759676105997834e-06,
+ "loss": 0.8623,
+ "step": 3206
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.753780899241027e-06,
+ "loss": 0.8679,
+ "step": 3207
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.747886953278311e-06,
+ "loss": 0.7628,
+ "step": 3208
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.741994270398826e-06,
+ "loss": 0.8452,
+ "step": 3209
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.736102852891227e-06,
+ "loss": 0.8134,
+ "step": 3210
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.730212703043666e-06,
+ "loss": 0.8137,
+ "step": 3211
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.7243238231438176e-06,
+ "loss": 0.8585,
+ "step": 3212
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.718436215478849e-06,
+ "loss": 0.8581,
+ "step": 3213
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.712549882335442e-06,
+ "loss": 0.8949,
+ "step": 3214
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.70666482599978e-06,
+ "loss": 0.8793,
+ "step": 3215
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.7007810487575475e-06,
+ "loss": 0.8935,
+ "step": 3216
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.694898552893941e-06,
+ "loss": 0.8296,
+ "step": 3217
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.6890173406936485e-06,
+ "loss": 0.8294,
+ "step": 3218
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.683137414440872e-06,
+ "loss": 0.8371,
+ "step": 3219
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.677258776419304e-06,
+ "loss": 0.8256,
+ "step": 3220
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.671381428912138e-06,
+ "loss": 0.8522,
+ "step": 3221
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.66550537420207e-06,
+ "loss": 0.82,
+ "step": 3222
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.659630614571287e-06,
+ "loss": 0.8246,
+ "step": 3223
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.653757152301488e-06,
+ "loss": 0.8773,
+ "step": 3224
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.647884989673849e-06,
+ "loss": 0.3202,
+ "step": 3225
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.642014128969055e-06,
+ "loss": 0.8579,
+ "step": 3226
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.63614457246728e-06,
+ "loss": 0.9022,
+ "step": 3227
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.630276322448188e-06,
+ "loss": 0.8407,
+ "step": 3228
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.624409381190946e-06,
+ "loss": 0.8329,
+ "step": 3229
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.618543750974202e-06,
+ "loss": 0.8275,
+ "step": 3230
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.6126794340761025e-06,
+ "loss": 0.828,
+ "step": 3231
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.606816432774279e-06,
+ "loss": 0.8471,
+ "step": 3232
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.600954749345851e-06,
+ "loss": 0.9396,
+ "step": 3233
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.595094386067428e-06,
+ "loss": 0.9392,
+ "step": 3234
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.589235345215117e-06,
+ "loss": 0.8406,
+ "step": 3235
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.583377629064494e-06,
+ "loss": 0.8983,
+ "step": 3236
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.5775212398906295e-06,
+ "loss": 0.8974,
+ "step": 3237
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.571666179968079e-06,
+ "loss": 0.8073,
+ "step": 3238
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.565812451570881e-06,
+ "loss": 0.8561,
+ "step": 3239
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.5599600569725495e-06,
+ "loss": 0.8568,
+ "step": 3240
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.554108998446096e-06,
+ "loss": 0.8746,
+ "step": 3241
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.548259278263999e-06,
+ "loss": 0.8681,
+ "step": 3242
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.542410898698226e-06,
+ "loss": 0.8507,
+ "step": 3243
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.536563862020218e-06,
+ "loss": 0.8954,
+ "step": 3244
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.530718170500896e-06,
+ "loss": 0.9302,
+ "step": 3245
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.524873826410658e-06,
+ "loss": 0.8068,
+ "step": 3246
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.519030832019383e-06,
+ "loss": 0.8184,
+ "step": 3247
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.513189189596422e-06,
+ "loss": 0.8974,
+ "step": 3248
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.507348901410604e-06,
+ "loss": 0.8233,
+ "step": 3249
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.501509969730224e-06,
+ "loss": 0.7774,
+ "step": 3250
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.495672396823061e-06,
+ "loss": 0.818,
+ "step": 3251
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.489836184956353e-06,
+ "loss": 0.8892,
+ "step": 3252
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.484001336396828e-06,
+ "loss": 0.2975,
+ "step": 3253
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.478167853410668e-06,
+ "loss": 0.9204,
+ "step": 3254
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.472335738263534e-06,
+ "loss": 0.8423,
+ "step": 3255
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.466504993220548e-06,
+ "loss": 0.8543,
+ "step": 3256
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.460675620546305e-06,
+ "loss": 0.7984,
+ "step": 3257
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.454847622504867e-06,
+ "loss": 0.8454,
+ "step": 3258
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4490210013597635e-06,
+ "loss": 0.8717,
+ "step": 3259
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4431957593739845e-06,
+ "loss": 0.868,
+ "step": 3260
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4373718988099896e-06,
+ "loss": 0.873,
+ "step": 3261
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.431549421929694e-06,
+ "loss": 0.878,
+ "step": 3262
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4257283309944804e-06,
+ "loss": 0.9071,
+ "step": 3263
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.419908628265203e-06,
+ "loss": 0.7793,
+ "step": 3264
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.414090316002161e-06,
+ "loss": 0.9329,
+ "step": 3265
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4082733964651166e-06,
+ "loss": 0.8399,
+ "step": 3266
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4024578719133e-06,
+ "loss": 0.8176,
+ "step": 3267
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.396643744605391e-06,
+ "loss": 0.3596,
+ "step": 3268
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.390831016799527e-06,
+ "loss": 0.8651,
+ "step": 3269
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.385019690753311e-06,
+ "loss": 0.8239,
+ "step": 3270
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.379209768723791e-06,
+ "loss": 0.8517,
+ "step": 3271
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.373401252967475e-06,
+ "loss": 0.858,
+ "step": 3272
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.367594145740324e-06,
+ "loss": 0.8358,
+ "step": 3273
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.361788449297748e-06,
+ "loss": 0.8278,
+ "step": 3274
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.355984165894613e-06,
+ "loss": 0.8426,
+ "step": 3275
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.350181297785242e-06,
+ "loss": 0.8728,
+ "step": 3276
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.344379847223398e-06,
+ "loss": 0.8779,
+ "step": 3277
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.338579816462298e-06,
+ "loss": 0.8403,
+ "step": 3278
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.332781207754605e-06,
+ "loss": 0.9416,
+ "step": 3279
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.326984023352435e-06,
+ "loss": 0.8018,
+ "step": 3280
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.321188265507342e-06,
+ "loss": 0.8802,
+ "step": 3281
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.31539393647034e-06,
+ "loss": 0.8872,
+ "step": 3282
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.309601038491874e-06,
+ "loss": 0.8388,
+ "step": 3283
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.303809573821842e-06,
+ "loss": 0.8846,
+ "step": 3284
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.298019544709579e-06,
+ "loss": 0.9091,
+ "step": 3285
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.292230953403866e-06,
+ "loss": 0.8256,
+ "step": 3286
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.286443802152926e-06,
+ "loss": 0.8003,
+ "step": 3287
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.280658093204422e-06,
+ "loss": 0.8543,
+ "step": 3288
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.274873828805459e-06,
+ "loss": 0.8514,
+ "step": 3289
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.269091011202576e-06,
+ "loss": 0.8885,
+ "step": 3290
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.263309642641751e-06,
+ "loss": 0.8813,
+ "step": 3291
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.257529725368405e-06,
+ "loss": 0.8033,
+ "step": 3292
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.251751261627386e-06,
+ "loss": 0.8435,
+ "step": 3293
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.245974253662988e-06,
+ "loss": 0.8202,
+ "step": 3294
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.240198703718932e-06,
+ "loss": 0.8932,
+ "step": 3295
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.234424614038375e-06,
+ "loss": 0.8381,
+ "step": 3296
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.2286519868639095e-06,
+ "loss": 0.8191,
+ "step": 3297
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.222880824437549e-06,
+ "loss": 0.8796,
+ "step": 3298
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.217111129000759e-06,
+ "loss": 0.9039,
+ "step": 3299
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.211342902794413e-06,
+ "loss": 0.9122,
+ "step": 3300
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.205576148058828e-06,
+ "loss": 0.8211,
+ "step": 3301
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.199810867033745e-06,
+ "loss": 0.8594,
+ "step": 3302
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.19404706195833e-06,
+ "loss": 0.849,
+ "step": 3303
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.188284735071177e-06,
+ "loss": 0.8311,
+ "step": 3304
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.182523888610316e-06,
+ "loss": 0.8589,
+ "step": 3305
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.176764524813187e-06,
+ "loss": 0.8491,
+ "step": 3306
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.171006645916662e-06,
+ "loss": 0.858,
+ "step": 3307
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.165250254157032e-06,
+ "loss": 0.8774,
+ "step": 3308
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.159495351770017e-06,
+ "loss": 0.8346,
+ "step": 3309
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.153741940990749e-06,
+ "loss": 0.8981,
+ "step": 3310
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.1479900240537956e-06,
+ "loss": 0.8917,
+ "step": 3311
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.142239603193128e-06,
+ "loss": 0.8623,
+ "step": 3312
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.136490680642146e-06,
+ "loss": 0.903,
+ "step": 3313
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.130743258633667e-06,
+ "loss": 0.336,
+ "step": 3314
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.124997339399916e-06,
+ "loss": 0.8925,
+ "step": 3315
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.119252925172549e-06,
+ "loss": 0.8299,
+ "step": 3316
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.113510018182628e-06,
+ "loss": 0.856,
+ "step": 3317
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.107768620660633e-06,
+ "loss": 0.851,
+ "step": 3318
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.102028734836456e-06,
+ "loss": 0.8333,
+ "step": 3319
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.0962903629394e-06,
+ "loss": 0.8182,
+ "step": 3320
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.090553507198187e-06,
+ "loss": 0.9085,
+ "step": 3321
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.0848181698409384e-06,
+ "loss": 0.736,
+ "step": 3322
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.079084353095202e-06,
+ "loss": 0.8301,
+ "step": 3323
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.07335205918792e-06,
+ "loss": 0.8716,
+ "step": 3324
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.067621290345455e-06,
+ "loss": 0.8847,
+ "step": 3325
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.061892048793568e-06,
+ "loss": 0.7946,
+ "step": 3326
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.056164336757426e-06,
+ "loss": 0.8706,
+ "step": 3327
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.050438156461613e-06,
+ "loss": 0.9453,
+ "step": 3328
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.044713510130108e-06,
+ "loss": 0.8009,
+ "step": 3329
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.038990399986302e-06,
+ "loss": 0.8799,
+ "step": 3330
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.03326882825298e-06,
+ "loss": 0.7933,
+ "step": 3331
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.027548797152336e-06,
+ "loss": 0.8408,
+ "step": 3332
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.021830308905963e-06,
+ "loss": 0.8578,
+ "step": 3333
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.016113365734861e-06,
+ "loss": 0.8337,
+ "step": 3334
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.0103979698594215e-06,
+ "loss": 0.8569,
+ "step": 3335
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.004684123499436e-06,
+ "loss": 0.8161,
+ "step": 3336
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.998971828874102e-06,
+ "loss": 0.8486,
+ "step": 3337
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.993261088202005e-06,
+ "loss": 0.8308,
+ "step": 3338
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.987551903701128e-06,
+ "loss": 0.3323,
+ "step": 3339
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.9818442775888595e-06,
+ "loss": 0.8296,
+ "step": 3340
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.97613821208197e-06,
+ "loss": 0.822,
+ "step": 3341
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.970433709396635e-06,
+ "loss": 0.8829,
+ "step": 3342
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.964730771748415e-06,
+ "loss": 0.8187,
+ "step": 3343
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.959029401352262e-06,
+ "loss": 0.8496,
+ "step": 3344
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.953329600422524e-06,
+ "loss": 0.7952,
+ "step": 3345
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.947631371172943e-06,
+ "loss": 0.346,
+ "step": 3346
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.941934715816642e-06,
+ "loss": 0.7867,
+ "step": 3347
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.936239636566137e-06,
+ "loss": 0.8678,
+ "step": 3348
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.930546135633327e-06,
+ "loss": 0.8968,
+ "step": 3349
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.924854215229509e-06,
+ "loss": 0.8541,
+ "step": 3350
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.919163877565351e-06,
+ "loss": 0.8464,
+ "step": 3351
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.9134751248509236e-06,
+ "loss": 0.8757,
+ "step": 3352
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.9077879592956675e-06,
+ "loss": 0.8348,
+ "step": 3353
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.902102383108415e-06,
+ "loss": 0.8805,
+ "step": 3354
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.896418398497377e-06,
+ "loss": 0.8828,
+ "step": 3355
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.890736007670144e-06,
+ "loss": 0.8602,
+ "step": 3356
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.885055212833696e-06,
+ "loss": 0.87,
+ "step": 3357
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.879376016194387e-06,
+ "loss": 0.3235,
+ "step": 3358
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.873698419957952e-06,
+ "loss": 0.8781,
+ "step": 3359
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.8680224263295045e-06,
+ "loss": 0.8274,
+ "step": 3360
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.862348037513533e-06,
+ "loss": 0.8665,
+ "step": 3361
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.856675255713905e-06,
+ "loss": 0.8574,
+ "step": 3362
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.851004083133862e-06,
+ "loss": 0.8545,
+ "step": 3363
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.8453345219760275e-06,
+ "loss": 0.8558,
+ "step": 3364
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.839666574442389e-06,
+ "loss": 0.3386,
+ "step": 3365
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.834000242734317e-06,
+ "loss": 0.8726,
+ "step": 3366
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.828335529052541e-06,
+ "loss": 0.8696,
+ "step": 3367
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.822672435597172e-06,
+ "loss": 0.9215,
+ "step": 3368
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.817010964567702e-06,
+ "loss": 0.8781,
+ "step": 3369
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.811351118162969e-06,
+ "loss": 0.3622,
+ "step": 3370
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.805692898581196e-06,
+ "loss": 0.8506,
+ "step": 3371
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.800036308019974e-06,
+ "loss": 0.8837,
+ "step": 3372
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.79438134867625e-06,
+ "loss": 0.9072,
+ "step": 3373
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.788728022746348e-06,
+ "loss": 0.9234,
+ "step": 3374
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.783076332425957e-06,
+ "loss": 0.8267,
+ "step": 3375
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.777426279910125e-06,
+ "loss": 0.8599,
+ "step": 3376
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.771777867393275e-06,
+ "loss": 0.3439,
+ "step": 3377
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.766131097069174e-06,
+ "loss": 0.8548,
+ "step": 3378
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.760485971130969e-06,
+ "loss": 0.8778,
+ "step": 3379
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7548424917711596e-06,
+ "loss": 0.7819,
+ "step": 3380
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.749200661181611e-06,
+ "loss": 0.8727,
+ "step": 3381
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7435604815535475e-06,
+ "loss": 0.8025,
+ "step": 3382
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7379219550775415e-06,
+ "loss": 0.8451,
+ "step": 3383
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.732285083943537e-06,
+ "loss": 0.8951,
+ "step": 3384
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.726649870340833e-06,
+ "loss": 0.8674,
+ "step": 3385
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.721016316458068e-06,
+ "loss": 0.8689,
+ "step": 3386
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.715384424483268e-06,
+ "loss": 0.8465,
+ "step": 3387
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.709754196603781e-06,
+ "loss": 0.9091,
+ "step": 3388
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.704125635006329e-06,
+ "loss": 0.9167,
+ "step": 3389
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.6984987418769825e-06,
+ "loss": 0.7812,
+ "step": 3390
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.692873519401154e-06,
+ "loss": 0.789,
+ "step": 3391
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.6872499697636195e-06,
+ "loss": 0.8379,
+ "step": 3392
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.681628095148502e-06,
+ "loss": 0.8154,
+ "step": 3393
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.6760078977392706e-06,
+ "loss": 0.8373,
+ "step": 3394
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.67038937971875e-06,
+ "loss": 0.828,
+ "step": 3395
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.664772543269101e-06,
+ "loss": 0.3334,
+ "step": 3396
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.659157390571842e-06,
+ "loss": 0.8677,
+ "step": 3397
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.653543923807833e-06,
+ "loss": 0.3524,
+ "step": 3398
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.6479321451572785e-06,
+ "loss": 0.916,
+ "step": 3399
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.642322056799732e-06,
+ "loss": 0.7922,
+ "step": 3400
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.636713660914087e-06,
+ "loss": 0.8673,
+ "step": 3401
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.631106959678575e-06,
+ "loss": 0.8586,
+ "step": 3402
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.625501955270777e-06,
+ "loss": 0.835,
+ "step": 3403
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.619898649867612e-06,
+ "loss": 0.8417,
+ "step": 3404
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.614297045645339e-06,
+ "loss": 0.3454,
+ "step": 3405
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.6086971447795625e-06,
+ "loss": 0.3055,
+ "step": 3406
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.603098949445209e-06,
+ "loss": 0.8493,
+ "step": 3407
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.597502461816557e-06,
+ "loss": 0.3029,
+ "step": 3408
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.5919076840672215e-06,
+ "loss": 0.8135,
+ "step": 3409
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.5863146183701454e-06,
+ "loss": 0.7894,
+ "step": 3410
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.580723266897616e-06,
+ "loss": 0.8846,
+ "step": 3411
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.575133631821243e-06,
+ "loss": 0.7988,
+ "step": 3412
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.5695457153119806e-06,
+ "loss": 0.8416,
+ "step": 3413
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.563959519540114e-06,
+ "loss": 0.7931,
+ "step": 3414
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.558375046675244e-06,
+ "loss": 0.907,
+ "step": 3415
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.552792298886335e-06,
+ "loss": 0.9054,
+ "step": 3416
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.547211278341646e-06,
+ "loss": 0.7543,
+ "step": 3417
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.541631987208789e-06,
+ "loss": 0.8198,
+ "step": 3418
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.536054427654698e-06,
+ "loss": 0.8264,
+ "step": 3419
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.530478601845624e-06,
+ "loss": 0.8012,
+ "step": 3420
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.52490451194716e-06,
+ "loss": 0.8178,
+ "step": 3421
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.519332160124215e-06,
+ "loss": 0.3198,
+ "step": 3422
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.513761548541032e-06,
+ "loss": 0.9327,
+ "step": 3423
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.508192679361169e-06,
+ "loss": 0.8053,
+ "step": 3424
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.502625554747508e-06,
+ "loss": 0.7713,
+ "step": 3425
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.497060176862259e-06,
+ "loss": 0.8214,
+ "step": 3426
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.491496547866948e-06,
+ "loss": 0.7932,
+ "step": 3427
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.485934669922428e-06,
+ "loss": 0.8353,
+ "step": 3428
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.480374545188866e-06,
+ "loss": 0.3577,
+ "step": 3429
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.474816175825754e-06,
+ "loss": 0.8695,
+ "step": 3430
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.469259563991894e-06,
+ "loss": 0.8552,
+ "step": 3431
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.46370471184541e-06,
+ "loss": 0.8456,
+ "step": 3432
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.458151621543744e-06,
+ "loss": 0.9141,
+ "step": 3433
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.452600295243653e-06,
+ "loss": 0.9056,
+ "step": 3434
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4470507351012116e-06,
+ "loss": 0.9111,
+ "step": 3435
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.441502943271797e-06,
+ "loss": 0.9325,
+ "step": 3436
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4359569219101115e-06,
+ "loss": 0.9181,
+ "step": 3437
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.430412673170167e-06,
+ "loss": 0.8205,
+ "step": 3438
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.424870199205283e-06,
+ "loss": 0.8596,
+ "step": 3439
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4193295021681e-06,
+ "loss": 0.9005,
+ "step": 3440
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.413790584210551e-06,
+ "loss": 0.8579,
+ "step": 3441
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.408253447483892e-06,
+ "loss": 0.8601,
+ "step": 3442
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.402718094138688e-06,
+ "loss": 0.9208,
+ "step": 3443
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.397184526324792e-06,
+ "loss": 0.8436,
+ "step": 3444
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.391652746191398e-06,
+ "loss": 0.8439,
+ "step": 3445
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.38612275588697e-06,
+ "loss": 0.7665,
+ "step": 3446
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.380594557559298e-06,
+ "loss": 0.8656,
+ "step": 3447
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.375068153355474e-06,
+ "loss": 0.8149,
+ "step": 3448
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.369543545421883e-06,
+ "loss": 0.8781,
+ "step": 3449
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.364020735904223e-06,
+ "loss": 0.8067,
+ "step": 3450
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.358499726947488e-06,
+ "loss": 0.8811,
+ "step": 3451
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.352980520695974e-06,
+ "loss": 0.763,
+ "step": 3452
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.347463119293283e-06,
+ "loss": 0.8323,
+ "step": 3453
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.341947524882301e-06,
+ "loss": 0.8524,
+ "step": 3454
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.336433739605227e-06,
+ "loss": 0.9055,
+ "step": 3455
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.330921765603549e-06,
+ "loss": 0.8241,
+ "step": 3456
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.325411605018056e-06,
+ "loss": 0.8678,
+ "step": 3457
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.31990325998883e-06,
+ "loss": 0.8328,
+ "step": 3458
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.314396732655253e-06,
+ "loss": 0.8313,
+ "step": 3459
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.308892025155989e-06,
+ "loss": 0.8711,
+ "step": 3460
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.303389139629007e-06,
+ "loss": 0.7765,
+ "step": 3461
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.297888078211564e-06,
+ "loss": 0.885,
+ "step": 3462
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2923888430402085e-06,
+ "loss": 0.8592,
+ "step": 3463
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.286891436250785e-06,
+ "loss": 0.8756,
+ "step": 3464
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.281395859978414e-06,
+ "loss": 0.8556,
+ "step": 3465
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2759021163575184e-06,
+ "loss": 0.8376,
+ "step": 3466
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.27041020752181e-06,
+ "loss": 0.2913,
+ "step": 3467
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.26492013560427e-06,
+ "loss": 0.8745,
+ "step": 3468
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.259431902737195e-06,
+ "loss": 0.8498,
+ "step": 3469
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2539455110521385e-06,
+ "loss": 0.8955,
+ "step": 3470
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.248460962679958e-06,
+ "loss": 0.8801,
+ "step": 3471
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.24297825975079e-06,
+ "loss": 0.7879,
+ "step": 3472
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.237497404394044e-06,
+ "loss": 0.8679,
+ "step": 3473
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.232018398738436e-06,
+ "loss": 0.3282,
+ "step": 3474
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.226541244911936e-06,
+ "loss": 0.8045,
+ "step": 3475
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.221065945041811e-06,
+ "loss": 0.8464,
+ "step": 3476
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.215592501254609e-06,
+ "loss": 0.8123,
+ "step": 3477
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.210120915676147e-06,
+ "loss": 0.8216,
+ "step": 3478
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2046511904315265e-06,
+ "loss": 0.8619,
+ "step": 3479
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.199183327645128e-06,
+ "loss": 0.8638,
+ "step": 3480
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.193717329440604e-06,
+ "loss": 0.7898,
+ "step": 3481
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.188253197940889e-06,
+ "loss": 0.8806,
+ "step": 3482
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.182790935268185e-06,
+ "loss": 0.8285,
+ "step": 3483
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.177330543543971e-06,
+ "loss": 0.8436,
+ "step": 3484
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.171872024889004e-06,
+ "loss": 0.8741,
+ "step": 3485
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.166415381423306e-06,
+ "loss": 0.8587,
+ "step": 3486
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.160960615266179e-06,
+ "loss": 0.8214,
+ "step": 3487
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.155507728536191e-06,
+ "loss": 0.8445,
+ "step": 3488
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.150056723351173e-06,
+ "loss": 0.8296,
+ "step": 3489
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.14460760182824e-06,
+ "loss": 0.843,
+ "step": 3490
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.139160366083765e-06,
+ "loss": 0.811,
+ "step": 3491
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.133715018233393e-06,
+ "loss": 0.8927,
+ "step": 3492
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.128271560392037e-06,
+ "loss": 0.8619,
+ "step": 3493
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.122829994673866e-06,
+ "loss": 0.8609,
+ "step": 3494
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.117390323192326e-06,
+ "loss": 0.7798,
+ "step": 3495
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.111952548060126e-06,
+ "loss": 0.8506,
+ "step": 3496
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.106516671389224e-06,
+ "loss": 0.8488,
+ "step": 3497
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.101082695290866e-06,
+ "loss": 0.8173,
+ "step": 3498
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.0956506218755344e-06,
+ "loss": 0.8619,
+ "step": 3499
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.09022045325299e-06,
+ "loss": 0.8947,
+ "step": 3500
+ }
+ ],
+ "logging_steps": 1.0,
+ "max_steps": 5197,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 500,
+ "total_flos": 9629812501774336.0,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/training_args.bin b/llava-v1.5-7b-concat-16/checkpoint-3500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..b5632db25e85f4a6440989c9cf4e5eb45e67ccd6
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7e1397f63ab71c83d4546fd5cc220108e4e3680c17b2f7501e2a09ab729de344
+size 6712
diff --git a/llava-v1.5-7b-concat-16/checkpoint-3500/zero_to_fp32.py b/llava-v1.5-7b-concat-16/checkpoint-3500/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..c98caae31534368be22b67fc4ae906836c992a8d
--- /dev/null
+++ b/llava-v1.5-7b-concat-16/checkpoint-3500/zero_to_fp32.py
@@ -0,0 +1,587 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+ # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+ # param, re-consolidating each param, while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+ - ``model`: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir, args.output_file, tag=args.tag)
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-2000/global_step2000/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2000/global_step2000/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..7b6f6f619c0ec12218608bf36962fb3c5b64bb4a
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2000/global_step2000/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eba118f91134c0e4126f835bae6335737b02b45b7541d724cc83539c568352cb
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-2000/global_step2000/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2000/global_step2000/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..272f6cb2dfc2f67f7cb523e06d9d5f143f7aaf4f
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2000/global_step2000/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:487970b8c8d8324222ece4d60c1b5c16119dae97d6f3e92819cf8cdbbb582389
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-2000/global_step2000/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2000/global_step2000/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..278371a04d293e0468aedfcf3c26d1faaf38df9b
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2000/global_step2000/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd67ae6267c15f001e686a2a84aea25f29e4c2981c37a42008d88e85480c133b
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-2500/global_step2500/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2500/global_step2500/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..958b7cb20f2a5a87a193c85db745d737c2ffe593
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2500/global_step2500/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ac8d43cb77bb17766c9a3888803a11052ab4d4a5d7becdbbcd59d957acdefb51
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-2500/global_step2500/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2500/global_step2500/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..1049298562a804dd5b44dd122d5d89093097ba38
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2500/global_step2500/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85b9deae3b322767b6b576873cc89b9305b9d92cdc1f77710ad576c1623eeaf0
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-2500/global_step2500/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2500/global_step2500/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..858457bea0bd91ed953d14464911f9749350fc53
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2500/global_step2500/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:48c6ceec8b2dc69829976c4fadf28b32c0c365210c1297680ec7f945f1b51ccd
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-2500/global_step2500/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2500/global_step2500/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..b9de41f184e16fbf45d9ab8695711a46c7b8ae21
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-2500/global_step2500/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c6176eb1634817ba639b21b9118d039dadfea8339ed041aeeeb775f268fc048b
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-3500/global_step3500/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-3500/global_step3500/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..15d02b1a5c74481289b74ec1a1f1573cbe2fe676
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-3500/global_step3500/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:794809dc9620120ac0e1cff52f3772cf34938ffa47ce9c26d2f64a5de83b6a38
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-3500/global_step3500/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-3500/global_step3500/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..122083ef3faabc9d3f140b401d4e082b32e2aeef
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-3500/global_step3500/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c093f1ebef4799850a8466192e07eb19fda01cfc7d66662e9c577cec87f7f780
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-3500/global_step3500/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-3500/global_step3500/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..3fa25ee3c2e17d13a34d85299bfb96d4f401a968
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-3500/global_step3500/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0fd170c1b36a9d673e17cea1d882544bf46e50b35e4552ddea0ceb398d4fd6a9
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-3500/global_step3500/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-3500/global_step3500/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..8efe6266b40d37f2bb0542a9bc195ac62605219a
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-3500/global_step3500/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7fd59ab0bf835c6cd6fb2151af4e043bc48d49fcd6461218d7d73ec0cf39d589
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-4000/global_step4000/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4000/global_step4000/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..513bee26cca79cfed6586367d6bc9039c36003ae
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4000/global_step4000/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:18dd0b07e6a81038cf14ba72b6ab79e715ee03404ae59166b0c20e1bd4e37948
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-4000/global_step4000/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4000/global_step4000/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..fb5a385ff1d56c671473cf387982c659cae8fe29
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4000/global_step4000/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7c6a88143977354b84ce83ecec966059190a7e87f60b3b07a09a3146c60e5201
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-4000/global_step4000/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4000/global_step4000/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..d33400c220a12111ac6bbfae15368ed2c0e51639
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4000/global_step4000/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c179b34f71ad229896030fe3cb919c744904337ee53807d89c88e88a60cc19ae
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-4500/global_step4500/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4500/global_step4500/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..13ed5e8543dc0477f2cca5c5377e20e439eb205e
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4500/global_step4500/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fb784d6a12d8ad0bcd72f8d2b6c58c9782bb3d44e6de9e8cb21910e9e36a82d0
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-4500/global_step4500/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4500/global_step4500/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..fdfeb386ca3b7555a290a71546fa9f740df0e030
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4500/global_step4500/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9528cc967e00c2e73d0b5e86205d9a736ec1894d40db9e916b758490743026e9
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-4500/global_step4500/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4500/global_step4500/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..6d2745f3c8dbe491ea24fb352990843b53ba42be
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4500/global_step4500/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b617506a2c57025b313f1ca2a49a2d540b636bc2fb49506930e64f9657d03a0f
+size 22893670078
diff --git a/llava-v1.5-7b-qwen2-16-dino/checkpoint-4500/global_step4500/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4500/global_step4500/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..d70f2a1780982ca8478d83ae69c3da1b24afbc07
--- /dev/null
+++ b/llava-v1.5-7b-qwen2-16-dino/checkpoint-4500/global_step4500/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:46f9317f0afa961af252d9c78d452dfabd69d716d2cee7d7854e1952bb42825b
+size 22893670078